[
  {
    "path": ".dockerignore",
    "content": "# Frontend\n.github\ne2e\nfrontend/Dockerfile\nfrontend/**/node_modules\nfrontend/app/.next\nfrontend/app/.swc\nfrontend/packages/*/dist\n"
  },
  {
    "path": ".github/actions/decide/.gitignore",
    "content": "node_modules"
  },
  {
    "path": ".github/actions/decide/action.yml",
    "content": "name: 'Decide action'\ndescription: \"See issue https://github.com/pingcap/tidb.ai/issues/314\"\ninputs:\n  pr-e2e-frontend-label-prefix:\n    description: \"Defaults to 'e2e-frontend:'.\"\n    required: true\n    default: \"e2e-frontend:\"\n  pr-e2e-backend-label-prefix:\n    description: \"Defaults to 'e2e-backend:'.\"\n    required: true\n    default: \"e2e-backend:\"\noutputs:\n  should-build-frontend:\n    description: Should build frontend\n  should-build-backend:\n    description: Should build backend\n  e2e-frontend:\n    description: Frontend docker version\n  e2e-backend:\n    description: Backend docker version\nruns:\n  using: 'node20'\n  main: 'index.js'\n"
  },
  {
    "path": ".github/actions/decide/index.js",
    "content": "import * as core from '@actions/core';\nimport { context, } from '@actions/github';\n\nconst FRONTEND_PREFIX = core.getInput('pr-e2e-frontend-label-prefix', { required: true });\nconst BACKEND_PREFIX = core.getInput('pr-e2e-backend-label-prefix', { required: true });\n\nfunction run() {\n  const defaultImageVersion = `sha-${context.sha}-dev`\n\n  if (context.eventName !== 'pull_request') {\n    // Build and run E2E for all other events.\n    // TODO: Maybe handle commit message like \"feat(frontend-only): Some message\"\n\n    core.setOutput('should-build-frontend', true);\n    core.setOutput('should-build-backend', true);\n    core.setOutput('e2e-frontend', defaultImageVersion);\n    core.setOutput('e2e-backend', defaultImageVersion);\n    return;\n  }\n\n  let label\n  if (!!(label = findPRLabel(label => label.name.startsWith(FRONTEND_PREFIX)))) {\n    core.setOutput('should-build-frontend', false);\n    core.setOutput('e2e-frontend', label.name.slice(FRONTEND_PREFIX.length));\n    core.info(`E2E Frontend: ${label.name.slice(FRONTEND_PREFIX.length)}`);\n  } else {\n    core.setOutput('should-build-frontend', true);\n    core.setOutput('e2e-frontend', defaultImageVersion);\n  }\n  if (!!(label = findPRLabel(label => label.name.startsWith(BACKEND_PREFIX)))) {\n    core.setOutput('should-build-backend', false);\n    core.setOutput('e2e-backend', label.name.slice(BACKEND_PREFIX.length));\n    core.info(`E2E Backend: ${label.name.slice(BACKEND_PREFIX.length)}`);\n  } else {\n    core.setOutput('should-build-backend', true);\n    core.setOutput('e2e-backend', defaultImageVersion);\n  }\n}\n\nfunction findPRLabel(test) {\n  return context.payload.pull_request.labels.find(test);\n}\n\nrun();\n"
  },
  {
    "path": ".github/actions/decide/package.json",
    "content": "{\n  \"name\": \"@tidbai/ci-decide-action\",\n  \"version\": \"1.0.0\",\n  \"type\": \"module\",\n  \"module\": \"index.js\",\n  \"devDependencies\": {\n    \"@actions/core\": \"^1.11.1\",\n    \"@actions/github\": \"^6.0.0\"\n  }\n}\n"
  },
  {
    "path": ".github/workflows/backend-test.yml",
    "content": "name: Backend Test\n\non:\n  push:\n    branches:\n      - main\n    paths:\n      - backend/**\n  pull_request:\n    branches:\n      - main\n    paths:\n      - backend/**\n\njobs:\n  backend-test:\n    name: Backend Test\n    runs-on: ubuntu-latest\n    defaults:\n      run:\n        working-directory: ./backend\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n      - name: Install Ruff Format\n        uses: astral-sh/ruff-action@v3\n      - name: Run Ruff Check\n        run: ruff check\n      - name: Run Ruff Format\n        run: ruff format --check\n"
  },
  {
    "path": ".github/workflows/deploy.yml",
    "content": "name: Deploy to Production\non:\n    workflow_dispatch:\n\njobs:\n  build:\n    name: Build\n    runs-on: ubuntu-latest\n    steps:\n    - name: ssh and deploy\n      uses: appleboy/ssh-action@v1.0.3\n      with:\n        host: ${{ secrets.DEPLOY_HOST }}\n        username: ${{ secrets.DEPLOY_USERNAME }}\n        key: ${{ secrets.DEPLOY_SSH_KEY }}\n        port: ${{ secrets.DEPLOY_PORT }}\n        script: cd /home/ubuntu/py.tidb.ai-docker && docker compose up -d --force-recreate --pull always\n"
  },
  {
    "path": ".github/workflows/regression.yml",
    "content": "name: Regression Test\non:\n  workflow_dispatch:\n    inputs:\n      dataset:\n        description: 'Langfuse dataset to test'\n        required: true\n        default: 'regression'\n      tidbAIChatEngine:\n        description: 'TiDB AI Chat Engine to test'\n        required: true\n        default: 'default'\n      llmProvider:\n        description: 'llm provider'\n        required: true\n        default: 'openai'\n        type: choice\n        options:\n          - openai\n          - gemini\n\njobs:\n  build:\n    name: Build\n    runs-on: ubuntu-latest\n    steps:\n    - name: Checkout\n      uses: actions/checkout@v4\n    - name: Setup Python\n      uses: actions/setup-python@v4\n      with:\n        python-version: \"3.11\"\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install -r backend/requirements.lock\n    - name: Run tests\n      env:\n        ENVIRONMENT: \"production\"\n        LANGFUSE_HOST: ${{ secrets.LANGFUSE_HOST }}\n        LANGFUSE_PUBLIC_KEY: ${{ secrets.LANGFUSE_PUBLIC_KEY }}\n        LANGFUSE_SECRET_KEY: ${{ secrets.LANGFUSE_SECRET_KEY }}\n        TIDB_AI_CHAT_ENDPOINT: ${{ secrets.TIDB_AI_CHAT_ENDPOINT }}\n        TIDB_AI_API_KEY: ${{ secrets.TIDB_AI_API_KEY }}\n        GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}\n        OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}\n        TIDB_HOST: \"127.0.0.1\"\n        TIDB_USER: \"root\"\n        TIDB_PASSWORD: \"fake\"\n        TIDB_DATABASE: \"test\"\n      run: |\n        cd backend && python main.py runeval --llm-provider ${{ inputs.llmProvider }} --dataset ${{ inputs.dataset }} --tidb-ai-chat-engine ${{ inputs.tidbAIChatEngine }}\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Build and Publish\n\non:\n  pull_request:\n    branches:\n      - main\n    types:\n      - opened\n      - synchronize\n      - reopened\n      - labeled\n      - unlabeled\n  push:\n    branches:\n      - main\n    tags:\n      - '*'\n  workflow_dispatch:\n    inputs:\n      tag:\n        description: \"The tag to build the image with. Format *.*.*((beta|rc)*)?\"\n        required: true\n\nconcurrency:\n    group: ${{ github.workflow }}-${{ github.ref }}\n    cancel-in-progress: true\n\njobs:\n    docker-metadata:\n      name: Generate docker metadata\n      runs-on: ubuntu-latest\n      if: ${{ (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'require-build')) || (github.event_name != 'pull_request' && !cancelled()) }}\n      outputs:\n        tags: ${{steps.meta.outputs.tags}}\n        labels: ${{steps.meta.outputs.labels}}\n        annotations: ${{steps.meta.outputs.annotations}}\n        platforms: |\n          linux/amd64\n          ${{(startsWith(github.ref, 'refs/tags/') && 'linux/arm64') || ''}}\n        should-build-frontend: ${{steps.decide.outputs.should-build-frontend}}\n        should-build-backend: ${{steps.decide.outputs.should-build-backend}}\n        e2e-frontend: ${{steps.decide.outputs.e2e-frontend}}\n        e2e-backend: ${{steps.decide.outputs.e2e-backend}}\n      permissions:\n        contents: read\n      steps:\n        - uses: actions/checkout@v4\n        - uses: actions/setup-node@v4\n          with:\n            node-version: lts/*\n            cache: npm\n            cache-dependency-path: .github/actions/*/package-lock.json\n        - run: npm ci\n          working-directory: .github/actions/decide\n        - name: Decide\n          id: decide\n          uses: ./.github/actions/decide/\n          with:\n            pr-e2e-frontend-label-prefix: 'e2e-frontend:'\n            pr-e2e-backend-label-prefix: 'e2e-backend:'\n        - name: Docker metadata\n          id: meta\n    
      uses: docker/metadata-action@v5\n          with:\n            github-token: ${{ github.token }}\n            images: \"{0}/{1}\"\n            tags: |\n              type=raw,value=branch-{{branch}},enable=${{ !startsWith(github.ref, 'refs/tags/') && !startsWith(github.ref, 'refs/pull/') }}\n              type=sha,prefix=sha-,format=long,suffix=-dev,enable=${{ !startsWith(github.ref, 'refs/tags/') }}\n              type=sha,prefix=sha-,format=long,enable=${{ startsWith(github.ref, 'refs/tags/') }}\n              type=ref,event=pr\n              type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{version}}\n              type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{major}}.{{minor}}\n              type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{major}}\n\n    build-backend:\n      name: Build and Push Backend Image\n      runs-on: ubuntu-latest\n      needs: docker-metadata\n      if: ${{needs.docker-metadata.outputs.should-build-backend == 'true'}}\n      permissions:\n        contents: read\n        packages: write\n        attestations: write\n        id-token: write\n      steps:\n        - name: Checkout\n          uses: actions/checkout@v4\n        - name: Login to Docker Hub\n          uses: docker/login-action@v3\n          with:\n            username: ${{ secrets.RELEASE_DOCKERHUB_USERNAME }}\n            password: ${{ secrets.RELEASE_DOCKERHUB_TOKEN }}\n        - name: Set up QEMU\n          uses: docker/setup-qemu-action@v3\n        - name: Set up Docker Buildx\n          uses: docker/setup-buildx-action@v3\n        - name: Build and push backend\n          uses: docker/build-push-action@v6\n          with:\n            context: backend\n            platforms: ${{needs.docker-metadata.outputs.platforms}}\n            push: true\n            tags: ${{format(needs.docker-metadata.outputs.tags, secrets.RELEASE_DOCKERHUB_USERNAME, 'backend')}}\n            labels: ${{needs.docker-metadata.outputs.labels}}\n     
       annotations: ${{needs.docker-metadata.outputs.annotations}}\n            cache-from: type=gha\n            cache-to: type=gha,mode=max\n\n    build-frontend:\n      name: Build and Push Frontend Image\n      runs-on: ubuntu-latest\n      needs: docker-metadata\n      if: ${{needs.docker-metadata.outputs.should-build-frontend == 'true'}}\n      permissions:\n        contents: read\n        packages: write\n        attestations: write\n        id-token: write\n      steps:\n        - name: Checkout\n          uses: actions/checkout@v4\n        - name: Login to Docker Hub\n          uses: docker/login-action@v3\n          with:\n            username: ${{ secrets.RELEASE_DOCKERHUB_USERNAME }}\n            password: ${{ secrets.RELEASE_DOCKERHUB_TOKEN }}\n        - name: Set up QEMU\n          uses: docker/setup-qemu-action@v3\n        - name: Set up Docker Buildx\n          uses: docker/setup-buildx-action@v3\n        - name: Build and push frontend\n          uses: docker/build-push-action@v6\n          with:\n            context: .\n            file: ./frontend/Dockerfile\n            platforms:  ${{needs.docker-metadata.outputs.platforms}}\n            push: true\n            tags: ${{format(needs.docker-metadata.outputs.tags, secrets.RELEASE_DOCKERHUB_USERNAME, 'frontend')}}\n            labels: ${{needs.docker-metadata.outputs.labels}}\n            annotations: ${{needs.docker-metadata.outputs.annotations}}\n            cache-from: type=gha\n            cache-to: type=gha,mode=max\n\n    e2e-test:\n      name: E2E Test\n      runs-on: ubuntu-latest\n      timeout-minutes: 10\n      defaults:\n        run:\n          working-directory: e2e\n      needs:\n        - docker-metadata\n        - build-backend\n        - build-frontend\n      if: |\n        !cancelled() &&\n        needs.docker-metadata.outputs.e2e-backend &&\n        needs.docker-metadata.outputs.e2e-frontend &&\n        !contains(needs.*.result, 'failure') &&\n        !contains(needs.*.result, 
'cancelled')\n      steps:\n        - uses: actions/checkout@v4\n\n        - uses: actions/setup-node@v4\n          with:\n            node-version: lts/*\n            cache: npm\n            cache-dependency-path: e2e/package-lock.json\n\n        - name: Install dependencies\n          run: npm ci\n\n        - name: Install Playwright Browsers\n          run: npx playwright install --with-deps chromium\n\n        - name: Prepare Tests\n          run: ./prepare-test.sh\n          env:\n            E2E_DOCKER_TAG_FRONTEND: ${{needs.docker-metadata.outputs.e2e-frontend}}\n            E2E_DOCKER_TAG_BACKEND: ${{needs.docker-metadata.outputs.e2e-backend}}\n\n        - name: Run tests\n          run: ./start-test.sh\n          env:\n            E2E_DOCKER_TAG_FRONTEND: ${{needs.docker-metadata.outputs.e2e-frontend}}\n            E2E_DOCKER_TAG_BACKEND: ${{needs.docker-metadata.outputs.e2e-backend}}\n            E2E_LLM_PROVIDER: ${{vars.E2E_LLM_PROVIDER}}\n            E2E_LLM_MODEL: ${{vars.E2E_LLM_MODEL || ''}}\n            E2E_LLM_CREDENTIALS: ${{secrets.E2E_LLM_CREDENTIALS}}\n            E2E_EMBEDDING_PROVIDER: ${{vars.E2E_EMBEDDING_PROVIDER || 'openai'}}\n            E2E_EMBEDDING_MODEL: ${{vars.E2E_EMBEDDING_MODEL || ''}}\n            E2E_EMBEDDING_CREDENTIALS: ${{secrets.E2E_EMBEDDING_CREDENTIALS}}\n            E2E_RERANKER_PROVIDER: ${{vars.E2E_RERANKER_PROVIDER || 'jinaai'}}\n            E2E_RERANKER_MODEL: ${{vars.E2E_RERANKER_MODEL || ''}}\n            E2E_RERANKER_CREDENTIALS: ${{secrets.E2E_RERANKER_CREDENTIALS}}\n\n        - uses: actions/upload-artifact@v4\n          if: \"!cancelled()\"\n          with:\n            name: e2e-report\n            path: e2e/playwright-report/\n            retention-days: 30\n\n    e2e-test-deploy:\n      name: \"Deploy E2E Test Results\"\n      runs-on: ubuntu-latest\n      needs: e2e-test\n      if: |\n        always() \n        && !contains(needs.e2e-test.result, 'skipped') \n        && !contains(needs.e2e-test.result, 
'cancelled')\n      defaults:\n        run:\n          working-directory: e2e\n      steps:\n        - uses: actions/checkout@v4\n\n        - uses: actions/setup-node@v4\n\n        - uses: actions/download-artifact@v4\n          with:\n            name: e2e-report\n            path: e2e/playwright-report/\n\n        - name: Deploy Test Results to Vercel\n          id: deploy-test-result\n          run: |\n            deploy_message=$(./deploy-test-result.sh)\n            echo \"deploy-message=${deploy_message}\" >> $GITHUB_OUTPUT\n          if: \"!cancelled()\"\n          env:\n            VERCEL_TOKEN: ${{secrets.E2E_VERCEL_TOKEN}}\n            VERCEL_ORG_ID: ${{secrets.E2E_VERCEL_ORG_ID}}\n            VERCEL_PROJECT_ID: ${{secrets.E2E_VERCEL_PROJECT_ID}}\n            VERCEL_CLI_ARGS: ${{(github.ref == 'refs/heads/main' && '--prod') || ''}}\n\n        - name: Find PR Comment\n          uses: peter-evans/find-comment@v3\n          if: github.event.pull_request.number\n          id: find-comment\n          with:\n            issue-number: ${{ github.event.pull_request.number }}\n            comment-author: 'github-actions[bot]'\n            body-includes: E2E Result Deployment\n\n        - name: Upsert Comment on PR\n          uses: peter-evans/create-or-update-comment@v4\n          if: github.event.pull_request.number\n          with:\n            comment-id: ${{ steps.find-comment.outputs.comment-id }}\n            issue-number: ${{ github.event.pull_request.number }}\n            body: |\n              ### E2E Result Deployment\n              ${{steps.deploy-test-result.outputs.deploy-message}}\n            edit-mode: replace\n"
  },
  {
    "path": ".github/workflows/verify.yml",
    "content": "name: Verify\n\non:\n  push:\n    branches:\n      - main\n    paths:\n      - frontend/**\n      - README.md\n  pull_request:\n    branches:\n      - main\n    paths:\n      - frontend/**\n      - README.md\n\njobs:\n  verify-frontend:\n    name: Verify frontend\n    runs-on: ubuntu-latest\n    defaults:\n      run:\n        working-directory: ./frontend\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n      - uses: pnpm/action-setup@v4\n        with:\n          run_install: false\n          package_json_file: ./frontend/package.json\n      - name: Install Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version-file: ./frontend/.nvmrc\n          cache-dependency-path: ./frontend\n          cache: 'pnpm'\n      - run: pnpm i --frozen-lockfile\n      - uses: actions/cache@v4\n        with:\n          path: |\n            ./frontend/.next/cache\n          key: ${{ runner.os }}-nextjs-${{ hashFiles('frontend/**/pnpm-lock.yaml') }}-${{ hashFiles('frontend/**/*.js', 'frontend/**/*.jsx', 'frontend/**/*.ts', 'frontend/**/*.tsx') }}\n      - run: pnpm run verify\n"
  },
  {
    "path": ".gitignore",
    "content": "*_dev.ipynb\n.idea\n.vscode\n.env\n.ruff_cache\n\nredis-data\ndata\nvenv\n.venv\nlocal-embedding-reranker\n\n*.swp\n*.swo\n\n.next\nnode_modules/\n_pagefind/\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# How to contribute\n\n## Contributing Guidelines\n\n[pingcap/autoflow](https://github.com/pingcap/autoflow) is an open-source project and we welcome contributions from the community. If you are interested in contributing to the project, please read the following guidelines.\n\n### Before You Get Started\n\n#### Software Prerequisites for Development\n\nIn this section, you should have some prerequisites software installed on your local machine:\n* [Docker](https://docs.docker.com/get-docker/)\n* [Docker Compose](https://docs.docker.com/compose/install/)\n* [Python](https://www.python.org/downloads/)\n* [Node.js](https://nodejs.org/en/download/)\n* [TiDB Cloud Serverless](https://pingcap.com/ai/?utm_source=tidb.ai&utm_medium=community) or [TiDB Self-Managed](https://www.pingcap.com/tidb-self-managed/?utm_source=tidb.ai&utm_medium=community)\n\n#### Setting up your development environment\n\nSetting up the project on your local machine is the first step to contributing to the project. You can clone the project from the GitHub repository and then start the project on your local machine. You can follow the instructions in the [Deployment Guide](https://autoflow.tidb.ai/deploy-with-docker) file to set up the project on your local machine.\n\nTo test your local changes, you can build and run the project using:\n\n```bash\ndocker compose -f docker-compose.dev.yml up\n```\n\n### Your First Contribution\n\nAll set to participate in the project? 
You can start by looking at the [open issues](https://github.com/pingcap/autoflow/issues) in this repo.\n\n\n### Components of the Project\n\nThe project is divided into several components, and you can contribute to any of the following components:\n* [Frontend](https://github.com/pingcap/autoflow/tree/main/frontend): The frontend of the project is built using Next.js.\n* [Backend](https://github.com/pingcap/tidb.ai/tree/main/backend): The backend of the project is built using FastAPI.\n  * [Data Source](https://github.com/pingcap/autoflow/tree/main/backend/app/rag/datasource): The Data Source component is responsible for indexing the data from different types of sources. You can add more data source types to the project.\n  * [LLM](https://github.com/pingcap/tidb.ai/tree/main/backend/app/rag/llms): The LLM Engine component is responsible for extracting knowledge from docs and generating responses. You can add more LLM models support to the project.\n  * [Reranker](https://github.com/pingcap/tidb.ai/blob/main/backend/app/rag/reranker_model_option.py): The Reranker Engine component is responsible for reranking the results retrieved from the database. You can add more Reranker models support to the project.\n  * [Embedding](https://github.com/pingcap/tidb.ai/blob/main/backend/app/rag/embed_model_option.py): The Embedding Engine component is responsible for converting text into vectors. You can add more Embedding models support to the project.\n  * [RAG & GraphRAG Engine](https://github.com/pingcap/tidb.ai/tree/main/backend/app/rag): The component is responsible for extracting knowledge from docs and then chunking, indexing and storing the data in the database, also includes retrieving the data from the database and generating the answer for the user.\n  * [Documentations](https://github.com/pingcap/tidb.ai/tree/main/frontend/app/src/pages): The documentation of the project is written in Markdown files. 
You can contribute to the documentation by adding more content to the documentation.\n\n### How to add an API?\nUsing the FastAPI framework.\n* Create a FastAPI Instance. \n    ```python\n    router = FastAPI()\n    ```\n* Use Decorators to Define API Endpoints. For example:\n    ```python\n    @router.get(\"xxx\")\n    ```\n* Implement the Route Handler Function.\n\n* Add sub-routes to the main route  and tag the sub-routes in `backend/app/api/main.py` .\n  ```python\n  api_router.include_router(sub_router, tags=[\"xxxx\"])  \n  ```\n\n## Maintainers\n\nPlease feel free to reach out to the maintainers if you have any questions or need help with the project.\n\n* [wd0517](https://github.com/wd0517)\n* [634750802](https://github.com/634750802)\n* [Mini256](https://github.com/Mini256)\n* [IANTHEREAL](https://github.com/IANTHEREAL)\n* [Cheese](https://github.com/Icemap)\n\n## Discussion\n\nIf you have any questions or suggestions, please feel free to open a discussion in the [Discussions](https://github.com/pingcap/tidb.ai/discussions)\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2025 PingCAP\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License."
  },
  {
    "path": "README.md",
    "content": "<!-- markdownlint-disable MD033 MD041 -->\n\n<div align=\"center\">\n<h1>AutoFlow</h1>\n  <a href='https://www.pingcap.com/tidb-cloud-serverless/?utm_source=tidb.ai&utm_medium=community'>\n    <img src=\"https://raw.githubusercontent.com/pingcap/autoflow/refs/heads/main/docs/public/icon-dark.svg\" alt=\"AutoFlow\" width =100 height=100></img>\n  </a>\n\n  <a href=\"https://trendshift.io/repositories/12294\" target=\"_blank\"><img src=\"https://trendshift.io/api/badge/repositories/12294\" alt=\"pingcap%2Fautoflow | Trendshift\" style=\"width: 250px; height: 55px;\" width=\"250\" height=\"55\"/></a>\n\n  [![Backend Docker Image Version](https://img.shields.io/docker/v/tidbai/backend?sort=semver&arch=amd64&label=tidbai%2Fbackend&color=blue&logo=fastapi)](https://hub.docker.com/r/tidbai/backend)\n  [![Frontend Docker Image Version](https://img.shields.io/docker/v/tidbai/frontend?sort=semver&arch=amd64&label=tidbai%2Ffrontend&&color=blue&logo=next.js)](https://hub.docker.com/r/tidbai/frontend)\n  [![E2E Status](https://img.shields.io/github/check-runs/pingcap/tidb.ai/main?nameFilter=E2E%20Test&label=e2e)](https://tidb-ai-playwright.vercel.app/)\n</div>\n\n> [!WARNING]\n> Autoflow is still in the early stages of development. And we are actively working on it, the next move is to make it to a python package and make it a RAG solution e.g. `pip install autoflow-ai`. 
If you have any questions or suggestions, please feel free to contact us on [Discussion](https://github.com/pingcap/autoflow/discussions).\n\n## Introduction\n\nAutoFlow is an open source graph rag (graphrag: knowledge graph rag) based knowledge base tool built on top of [TiDB Vector](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) and [LlamaIndex](https://github.com/run-llama/llama_index) and [DSPy](https://github.com/stanfordnlp/dspy).\n\n- **Live Demo**: [https://tidb.ai](https://tidb.ai?utm_source=tidb.ai&utm_medium=community)\n- **Deployment Docs**: [Deployment Docs](https://autoflow.tidb.ai/?utm_source=github&utm_medium=tidb.ai)\n\n## Features\n\n1. **Perplexity-style Conversational Search page**: Our platform features an advanced built-in website crawler, designed to elevate your browsing experience. This crawler effortlessly navigates official and documentation sites, ensuring comprehensive coverage and streamlined search processes through sitemap URL scraping.\n\n![Image](https://github.com/user-attachments/assets/50a4e5ce-8b93-446a-8ce7-11ed7844bd1e)\n\n2. **Embeddable JavaScript Snippet**: Integrate our conversational search window effortlessly into your website by copying and embedding a simple JavaScript code snippet. 
This widget, typically placed at the bottom right corner of your site, facilitates instant responses to product-related queries.\n\n![Image](https://github.com/user-attachments/assets/f0dc82db-c14d-4863-a242-c7da3a719568)\n\n## Deploy\n\n- [Deploy with Docker Compose](https://autoflow.tidb.ai/deploy-with-docker) (with: 4 CPU cores and 8GB RAM)\n\n## Tech Stack\n\n- [TiDB](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) – Database to store chat history, vector, json, and analytic\n- [LlamaIndex](https://www.llamaindex.ai/) - RAG framework\n- [DSPy](https://github.com/stanfordnlp/dspy) - The framework for programming—not prompting—foundation models\n- [Next.js](https://nextjs.org/) – Framework\n- [Tailwind CSS](https://tailwindcss.com/) – CSS framework\n- [shadcn/ui](https://ui.shadcn.com/) - Design\n\n## Contributing\n\nWe welcome contributions from the community. If you are interested in contributing to the project, please read the [Contributing Guidelines](/CONTRIBUTING.md).\n\n<a href=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats?repo_id=752946440\" target=\"_blank\" style=\"display: block\" align=\"center\">\n  <picture>\n    <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=dark\" width=\"655\" height=\"auto\">\n    <img alt=\"Performance Stats of pingcap/autoflow - Last 28 days\" src=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=light\" width=\"655\" height=\"auto\">\n  </picture>\n</a>\n<!-- Made with [OSS Insight](https://ossinsight.io/) -->\n\n## License\n\nAutoFlow is open-source under the Apache License, Version 2.0. You can [find it here](https://github.com/pingcap/autoflow/blob/main/LICENSE.txt).\n\n## Contact\n\nYou can reach out to us on [Discord](https://discord.gg/XzSW23Jg9p).\n"
  },
  {
    "path": "backend/.dockerignore",
    "content": "# MacOS\n.DS_Store\n\n# Environment\n.env\n.venv\n\n# Git\n.git\n\n# IDE\n.idea\n.vscode\n\n# Cache\n.pytest_cache\n.ruff_cache\n**/__pycache__\n\n# Test\n.tox\ntests\npytest.ini\n\n# Extra\nlocal_embedding_reranker/\n.pre-commit-config.yaml\n.gitignore\nDockerfile\n.dockerignore"
  },
  {
    "path": "backend/.gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n#   For a library or package, you might want to ignore these files since the code is\n#   intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# poetry\n#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.\n#   This is especially recommended for binary packages to ensure reproducibility, and is more\n#   commonly ignored for libraries.\n#   
https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control\n#poetry.lock\n\n# pdm\n#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.\n#pdm.lock\n#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it\n#   in version control.\n#   https://pdm.fming.dev/#use-with-ide\n.pdm.toml\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n\n# PyCharm\n#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can\n#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore\n#  and can be added to the global gitignore or merged into this file.  For a more nuclear\n#  option (not recommended) you can uncomment the following to ignore the entire idea folder.\n#.idea/\n\n.ruff_cache\n\n*.csv\n\n.DS_Store\n\n# VSCode\n.vscode/\n\ncheckpoint.json"
  },
  {
    "path": "backend/.pre-commit-config.yaml",
    "content": "repos:\n- repo: https://github.com/astral-sh/ruff-pre-commit\n  # Ruff version.\n  rev: v0.8.6\n  hooks:\n    # Run the linter.\n    - id: ruff\n    # Run the formatter.\n    - id: ruff-format"
  },
  {
    "path": "backend/.python-version",
    "content": "3.12.3"
  },
  {
    "path": "backend/Dockerfile",
    "content": "FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim\n\nWORKDIR /app/\n\n# Setup supervisord.\nRUN apt-get update && apt-get install -y supervisor gcc\nCOPY supervisord.conf /usr/etc/supervisord.conf\n\n# Install dependencies.\nCOPY uv.lock /app/uv.lock\nCOPY pyproject.toml /app/pyproject.toml\n\nRUN PYTHONDONTWRITEBYTECODE=1 uv sync --frozen\nENV PATH=\"/app/.venv/bin:$PATH\"\n\n# Pre-download playwright dependencies.\nRUN playwright install --with-deps chromium\n\n# Pre-download nltk data.\nRUN python -c 'import nltk; \\\ndownload_dir = \"/usr/local/lib/python3.11/site-packages/llama_index/core/_static/nltk_cache\";\\\nnltk.download(\"stopwords\", download_dir=download_dir);\\\nnltk.download(\"punkt\", download_dir=download_dir);'\n\nENV PYTHONPATH=/app\n\nCOPY . /app/\n\n# Default number of workers\nENV WEB_CONCURRENCY=4\n\nCMD [\"sh\", \"-c\", \"fastapi run app/api_server.py --host 0.0.0.0 --port 80 --workers ${WEB_CONCURRENCY}\"]\n"
  },
  {
    "path": "backend/Makefile",
    "content": ".PHONY: test\n\nmakemigrations:\n\t@echo \"Creating migrations...\"\n\t@if [ -z \"$(NAME)\" ]; then \\\n\t\tuv run alembic revision --autogenerate; \\\n\telse \\\n\t\tuv run alembic revision --autogenerate -m \"$(NAME)\"; \\\n\tfi\n\nmigrate:\n\t@echo \"Migrating database...\"\n\t@uv run alembic upgrade head\n\nlint:\n\t@echo \"Linting code...\"\n\t@uv run ruff check .\n\nformat:\n\t@echo \"Formatting code...\"\n\t@uv run ruff format .\n\ntest:\n\t@echo \"Running tests...\"\n\t@uv run pytest -v tests/\n\ndev_backend:\n\t@echo \"Running backend server in development mode...\"\n\t@uv run fastapi dev app/api_server.py --host 127.0.0.1 --port 5001\n\nrun_backend:\n\t@echo \"Running backend server...\"\n\t@uv run fastapi run app/api_server.py --host 0.0.0.0 --port 5001 --workers 4\n\ndev_celery_flower:\n\t@echo \"Running Celery Flower...\"\n\t@uv run celery -A app.celery flower --address=0.0.0.0 --port=5555\n\ndev_background_worker:\n\t@echo \"Running background worker...\"\n\t@uv run celery -A app.celery worker -Q default -l INFO -E\n\ndev_eval_worker:\n\t@echo \"Running evaluation worker...\"\n\t@uv run celery -A app.celery worker -Q evaluation --loglevel=debug --pool=solo\n"
  },
  {
    "path": "backend/README.md",
    "content": "# Backend of tidb.ai\n\n\n## Development\n\n### Install dependencies\n\n1. Install [uv](https://docs.astral.sh/uv/getting-started/installation/)\n2. Use `uv` to install dependencies\n\n```bash\nuv sync\n```\n\n### Prepare environment\n\n```\ncp .env.example .env\n```\n\nEdit `.env` to set environment variables.\n\n\n### Run migrations\n\n```bash\nmake migrate\n```\n\n### Run development server\n\n```bash\nuv run python main.py runserver\n```"
  },
  {
    "path": "backend/alembic.ini",
    "content": "# A generic, single database configuration.\n\n[alembic]\n# path to migration scripts\nscript_location = app/alembic\n\n# template used to generate migration files\n# file_template = %%(rev)s_%%(slug)s\n\n# timezone to use when rendering the date\n# within the migration file as well as the filename.\n# string value is passed to dateutil.tz.gettz()\n# leave blank for localtime\n# timezone =\n\n# sys.path path, will be prepended to sys.path if present.\n# defaults to the current working directory.\nprepend_sys_path = .\n\n# max length of characters to apply to the\n# \"slug\" field\n#truncate_slug_length = 40\n\n# set to 'true' to run the environment during\n# the 'revision' command, regardless of autogenerate\n# revision_environment = false\n\n# set to 'true' to allow .pyc and .pyo files without\n# a source .py file to be detected as revisions in the\n# versions/ directory\n# sourceless = false\n\n# version location specification; this defaults\n# to alembic/versions.  When using multiple version\n# directories, initial revisions must be specified with --version-path\n# version_locations = %(here)s/bar %(here)s/bat alembic/versions\n\n# the output encoding used when revision files\n# are written from script.py.mako\n# output_encoding = utf-8\n\n# Logging configuration\n[loggers]\nkeys = root,sqlalchemy,alembic\n\n[handlers]\nkeys = console\n\n[formatters]\nkeys = generic\n\n[logger_root]\nlevel = WARN\nhandlers = console\nqualname =\n\n[logger_sqlalchemy]\nlevel = WARN\nhandlers =\nqualname = sqlalchemy.engine\n\n[logger_alembic]\nlevel = INFO\nhandlers =\nqualname = alembic\n\n[handler_console]\nclass = StreamHandler\nargs = (sys.stderr,)\nlevel = NOTSET\nformatter = generic\n\n[formatter_generic]\nformat = %(levelname)-5.5s [%(name)s] %(message)s\ndatefmt = %H:%M:%S\n"
  },
  {
    "path": "backend/app/__init__.py",
    "content": "import os\n\nos.environ[\"LITELLM_LOCAL_MODEL_COST_MAP\"] = \"True\"\n"
  },
  {
    "path": "backend/app/alembic/env.py",
    "content": "from logging.config import fileConfig\n\nfrom alembic import context\nfrom sqlalchemy import engine_from_config, pool\nfrom sqlmodel import SQLModel\nfrom tidb_vector.sqlalchemy import VectorType\n\nfrom app.core.config import settings\nfrom app.models import *  # noqa\nfrom app.models.knowledge_base_scoped.table_naming import (\n    KB_CHUNKS_TABLE_PATTERN,\n    KB_ENTITIES_TABLE_PATTERN,\n    KB_RELATIONSHIPS_TABLE_PATTERN,\n)\n\n# this is the Alembic Config object, which provides\n# access to the values within the .ini file in use.\nconfig = context.config\n\n# Interpret the config file for Python logging.\n# This line sets up loggers basically.\nfileConfig(config.config_file_name)\n\n# add your model's MetaData object here\n# for 'autogenerate' support\n# from myapp import mymodel\n# target_metadata = mymodel.Base.metadata\n# target_metadata = None\n\ntarget_metadata = SQLModel.metadata\n\n\ndef get_url():\n    return str(settings.SQLALCHEMY_DATABASE_URI)\n\n\ndef include_name(name, type_, parent_names):\n    if type_ == \"table\":\n        return (\n            not bool(KB_CHUNKS_TABLE_PATTERN.match(name))\n            and not bool(KB_ENTITIES_TABLE_PATTERN.match(name))\n            and not bool(KB_RELATIONSHIPS_TABLE_PATTERN.match(name))\n        )\n    else:\n        return True\n\n\ndef run_migrations_offline():\n    \"\"\"Run migrations in 'offline' mode.\n\n    This configures the context with just a URL\n    and not an Engine, though an Engine is acceptable\n    here as well.  
By skipping the Engine creation\n    we don't even need a DBAPI to be available.\n\n    Calls to context.execute() here emit the given string to the\n    script output.\n\n    \"\"\"\n    url = get_url()\n    context.configure(\n        url=url,\n        target_metadata=target_metadata,\n        include_name=include_name,\n        literal_binds=True,\n        compare_type=True,\n    )\n\n    with context.begin_transaction():\n        context.run_migrations()\n\n\ndef run_migrations_online():\n    \"\"\"Run migrations in 'online' mode.\n\n    In this scenario we need to create an Engine\n    and associate a connection with the context.\n\n    \"\"\"\n    configuration = config.get_section(config.config_ini_section)\n    configuration[\"sqlalchemy.url\"] = get_url()\n    connectable = engine_from_config(\n        configuration,\n        prefix=\"sqlalchemy.\",\n        poolclass=pool.NullPool,\n    )\n\n    with connectable.connect() as connection:\n        connection.dialect.ischema_names[\"vector\"] = VectorType\n        context.configure(\n            connection=connection,\n            target_metadata=target_metadata,\n            include_name=include_name,\n            compare_type=True,\n        )\n\n        with context.begin_transaction():\n            context.run_migrations()\n\n\nif context.is_offline_mode():\n    run_migrations_offline()\nelse:\n    run_migrations_online()\n"
  },
  {
    "path": "backend/app/alembic/script.py.mako",
    "content": "\"\"\"${message}\n\nRevision ID: ${up_revision}\nRevises: ${down_revision | comma,n}\nCreate Date: ${create_date}\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom tidb_vector.sqlalchemy import VectorType\n${imports if imports else \"\"}\n\n# revision identifiers, used by Alembic.\nrevision = ${repr(up_revision)}\ndown_revision = ${repr(down_revision)}\nbranch_labels = ${repr(branch_labels)}\ndepends_on = ${repr(depends_on)}\n\n\ndef upgrade():\n    ${upgrades if upgrades else \"pass\"}\n\n\ndef downgrade():\n    ${downgrades if downgrades else \"pass\"}\n"
  },
  {
    "path": "backend/app/alembic/versions/00534dc350db_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 00534dc350db\nRevises: 10f36e8a25c4\nCreate Date: 2024-08-26 12:46:00.203425\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"00534dc350db\"\ndown_revision = \"10f36e8a25c4\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"embedding_models\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"OPENAI\"),\n        type_=sa.String(length=32),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=mysql.ENUM(\n            \"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\", \"BEDROCK\"\n        ),\n        type_=sa.String(length=32),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"reranker_models\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"JINA\", \"COHERE\", \"BAISHENG\"),\n        type_=sa.String(length=32),\n        existing_nullable=False,\n    )\n    op.execute(\"UPDATE embedding_models SET provider = lower(provider)\")\n    op.execute(\"UPDATE llms SET provider = lower(provider)\")\n    op.execute(\"UPDATE reranker_models SET provider = lower(provider)\")\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! 
###\n    op.alter_column(\n        \"reranker_models\",\n        \"provider\",\n        existing_type=sa.String(length=32),\n        type_=mysql.ENUM(\"JINA\", \"COHERE\", \"BAISHENG\"),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=sa.String(length=32),\n        type_=mysql.ENUM(\n            \"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\", \"BEDROCK\"\n        ),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"embedding_models\",\n        \"provider\",\n        existing_type=sa.String(length=32),\n        type_=mysql.ENUM(\"OPENAI\"),\n        existing_nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/041fbef26e3a_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 041fbef26e3a\nRevises: 04d81be446c3\nCreate Date: 2024-08-19 08:20:13.695891\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects.mysql import DATETIME\n\n\n# revision identifiers, used by Alembic.\nrevision = \"041fbef26e3a\"\ndown_revision = \"04d81be446c3\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"site_settings\",\n        \"created_at\",\n        existing_type=DATETIME(timezone=True, fsp=6),\n        server_default=sa.text(\"current_timestamp(6)\"),\n        nullable=False,\n    )\n    op.alter_column(\n        \"site_settings\",\n        \"updated_at\",\n        existing_type=DATETIME(timezone=True, fsp=6),\n        server_default=sa.text(\"current_timestamp(6) on update current_timestamp(6)\"),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"site_settings\",\n        \"created_at\",\n        existing_type=DATETIME(timezone=True),\n        nullable=False,\n    )\n    op.alter_column(\n        \"site_settings\",\n        \"updated_at\",\n        existing_type=DATETIME(timezone=True),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/04947f9684ab_public_chat_engine.py",
    "content": "\"\"\"public_chat_engine\n\nRevision ID: 04947f9684ab\nRevises: 211f3c5aa125\nCreate Date: 2025-05-28 15:13:22.058160\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n# revision identifiers, used by Alembic.\nrevision = \"04947f9684ab\"\ndown_revision = \"211f3c5aa125\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"chat_engines\", sa.Column(\"is_public\", sa.Boolean(), nullable=False))\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"chat_engines\", \"is_public\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/04d4f05116ed_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 04d4f05116ed\nRevises: 94b198e20946\nCreate Date: 2024-07-23 01:26:07.117623\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom app.models.base import AESEncryptedColumn\n\n\n# revision identifiers, used by Alembic.\nrevision = \"04d4f05116ed\"\ndown_revision = \"94b198e20946\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"embedding_models\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),\n        sa.Column(\n            \"provider\", sa.Enum(\"OPENAI\", name=\"embeddingprovider\"), nullable=False\n        ),\n        sa.Column(\n            \"model\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False\n        ),\n        sa.Column(\"config\", sa.JSON(), nullable=True),\n        sa.Column(\"credentials\", AESEncryptedColumn(), nullable=True),\n        sa.Column(\"is_default\", sa.Boolean(), nullable=False),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"llms\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), 
nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),\n        sa.Column(\n            \"provider\",\n            sa.Enum(\"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", name=\"llmprovider\"),\n            nullable=False,\n        ),\n        sa.Column(\n            \"model\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False\n        ),\n        sa.Column(\"config\", sa.JSON(), nullable=True),\n        sa.Column(\"credentials\", AESEncryptedColumn(), nullable=True),\n        sa.Column(\"is_default\", sa.Boolean(), nullable=False),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.add_column(\"chat_engines\", sa.Column(\"llm_id\", sa.Integer(), nullable=True))\n    op.add_column(\"chat_engines\", sa.Column(\"fast_llm_id\", sa.Integer(), nullable=True))\n    op.create_foreign_key(None, \"chat_engines\", \"llms\", [\"fast_llm_id\"], [\"id\"])\n    op.create_foreign_key(None, \"chat_engines\", \"llms\", [\"llm_id\"], [\"id\"])\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_constraint(None, \"chat_engines\", type_=\"foreignkey\")\n    op.drop_constraint(None, \"chat_engines\", type_=\"foreignkey\")\n    op.drop_column(\"chat_engines\", \"fast_llm_id\")\n    op.drop_column(\"chat_engines\", \"llm_id\")\n    op.drop_table(\"llms\")\n    op.drop_table(\"embedding_models\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/04d81be446c3_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 04d81be446c3\nRevises: e32f1e546eec\nCreate Date: 2024-08-08 17:11:50.178696\n\n\"\"\"\n\nfrom alembic import op\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"04d81be446c3\"\ndown_revision = \"e32f1e546eec\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=mysql.ENUM(\n            \"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\", \"BEDROCK\"\n        ),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\"),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/10f36e8a25c4_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 10f36e8a25c4\nRevises: 041fbef26e3a\nCreate Date: 2024-08-20 09:10:50.130219\n\n\"\"\"\n\nfrom alembic import op\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"10f36e8a25c4\"\ndown_revision = \"041fbef26e3a\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"reranker_models\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"JINA\", \"COHERE\", \"BAISHENG\"),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"reranker_models\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"JINA\", \"COHERE\"),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/197bc8be72d1_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 197bc8be72d1\nRevises: 04d4f05116ed\nCreate Date: 2024-07-25 14:49:29.363595\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"197bc8be72d1\"\ndown_revision = \"04d4f05116ed\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        type_=sa.Enum(\n            \"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\", name=\"llmprovider\"\n        ),\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        type_=sa.Enum(\"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", name=\"llmprovider\"),\n    )\n"
  },
  {
    "path": "backend/app/alembic/versions/211f3c5aa125_chunking_settings.py",
    "content": "\"\"\"chunking_settings\n\nRevision ID: 211f3c5aa125\nRevises: 2adc0b597dcd\nCreate Date: 2025-02-17 14:20:56.253857\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"211f3c5aa125\"\ndown_revision = \"2adc0b597dcd\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\n        \"knowledge_bases\", sa.Column(\"chunking_config\", sa.JSON(), nullable=True)\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"knowledge_bases\", \"chunking_config\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/27a6723b767a_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 27a6723b767a\nRevises: d2ad44deab20\nCreate Date: 2024-11-29 20:38:05.773083\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n# revision identifiers, used by Alembic.\nrevision = \"27a6723b767a\"\ndown_revision = \"d2ad44deab20\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\n        \"chat_messages\",\n        sa.Column(\"is_best_answer\", sa.Boolean(), server_default=\"0\", nullable=False),\n    )\n    op.create_index(\n        \"ix_chat_message_is_best_answer\",\n        \"chat_messages\",\n        [\"is_best_answer\"],\n        unique=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_index(\"ix_chat_message_is_best_answer\", table_name=\"chat_messages\")\n    op.drop_column(\"chat_messages\", \"is_best_answer\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/2adc0b597dcd_int_enum_type.py",
    "content": "\"\"\"int_enum_type\n\nRevision ID: 2adc0b597dcd\nRevises: a54f966436ce\nCreate Date: 2025-01-24 17:58:08.339090\n\n\"\"\"\n\nfrom alembic import op\nfrom sqlalchemy.dialects import mysql\n\nfrom app.models.base import IntEnumType\nfrom app.models.chat import ChatVisibility\n\n# revision identifiers, used by Alembic.\nrevision = \"2adc0b597dcd\"\ndown_revision = \"a54f966436ce\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"chats\",\n        \"visibility\",\n        existing_type=mysql.SMALLINT(),\n        type_=IntEnumType(ChatVisibility),\n        existing_nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"chats\",\n        \"visibility\",\n        existing_type=IntEnumType(ChatVisibility),\n        type_=mysql.SMALLINT(),\n        existing_nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/2fc10c21bf88_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 5fdea8e26454\nRevises:\nCreate Date: 2024-07-10 14:43:55.913126\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom tidb_vector.sqlalchemy import VectorType\nfrom sqlalchemy.dialects import mysql\nfrom app.core.config import settings\n\n# revision identifiers, used by Alembic.\nrevision = \"2fc10c21bf88\"\ndown_revision = None\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"chat_engines\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),\n        sa.Column(\"engine_options\", sa.JSON(), nullable=True),\n        sa.Column(\"is_default\", sa.Boolean(), nullable=False),\n        sa.Column(\"deleted_at\", sa.DateTime(), nullable=True),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"documents\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"hash\", sqlmodel.sql.sqltypes.AutoString(length=32), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=256), 
nullable=False),\n        sa.Column(\"content\", mysql.MEDIUMTEXT(), nullable=True),\n        sa.Column(\n            \"mime_type\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False\n        ),\n        sa.Column(\n            \"source_uri\", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False\n        ),\n        sa.Column(\"meta\", sa.JSON(), nullable=True),\n        sa.Column(\"last_modified_at\", sa.DateTime(), nullable=True),\n        sa.Column(\n            \"index_status\",\n            sa.Enum(\n                \"NOT_STARTED\",\n                \"PENDING\",\n                \"RUNNING\",\n                \"COMPLETED\",\n                \"FAILED\",\n                name=\"docindextaskstatus\",\n            ),\n            nullable=False,\n        ),\n        sa.Column(\"index_result\", sa.Text(), nullable=True),\n        sa.PrimaryKeyConstraint(\"id\"),\n        sa.UniqueConstraint(\"source_uri\"),\n    )\n    op.create_table(\n        \"entities\",\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False),\n        sa.Column(\"description\", sa.Text(), nullable=True),\n        sa.Column(\"meta\", sa.JSON(), nullable=True),\n        sa.Column(\n            \"entity_type\",\n            sa.Enum(\"original\", \"synopsis\", name=\"entitytype\"),\n            nullable=False,\n        ),\n        sa.Column(\"synopsis_info\", sa.JSON(), nullable=True),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\n            \"description_vec\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.Column(\n            \"meta_vec\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"semantic_cache\",\n        sa.Column(\"id\", 
sa.Integer(), nullable=False),\n        sa.Column(\"query\", sa.Text(), nullable=True),\n        sa.Column(\n            \"query_vec\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.Column(\"value\", sa.Text(), nullable=True),\n        sa.Column(\n            \"value_vec\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.Column(\"meta\", sa.JSON(), nullable=True),\n        sa.Column(\n            \"created_at\", sa.DateTime(), server_default=sa.text(\"now()\"), nullable=True\n        ),\n        sa.Column(\n            \"updated_at\", sa.DateTime(), server_default=sa.text(\"now()\"), nullable=True\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n        mysql_TTL=\"created_at + INTERVAL 1 MONTH;\",\n    )\n    op.create_table(\n        \"site_settings\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),\n        sa.Column(\n            \"data_type\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False\n        ),\n        sa.Column(\"value\", sa.JSON(), nullable=True),\n        sa.PrimaryKeyConstraint(\"id\"),\n        sa.UniqueConstraint(\"name\"),\n    )\n    op.create_table(\n        \"staff_action_logs\",\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"action\", sqlmodel.sql.sqltypes.AutoString(), nullable=False),\n        sa.Column(\n            
\"action_time\", sa.DateTime(), server_default=sa.text(\"now()\"), nullable=True\n        ),\n        sa.Column(\"target_type\", sqlmodel.sql.sqltypes.AutoString(), nullable=False),\n        sa.Column(\"target_id\", sa.Integer(), nullable=False),\n        sa.Column(\"before\", sa.JSON(), nullable=True),\n        sa.Column(\"after\", sa.JSON(), nullable=True),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"users\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.Column(\"email\", sqlmodel.sql.sqltypes.AutoString(), nullable=False),\n        sa.Column(\n            \"hashed_password\", sqlmodel.sql.sqltypes.AutoString(), nullable=False\n        ),\n        sa.Column(\"is_active\", sa.Boolean(), nullable=False),\n        sa.Column(\"is_superuser\", sa.Boolean(), nullable=False),\n        sa.Column(\"is_verified\", sa.Boolean(), nullable=False),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_index(op.f(\"ix_users_email\"), \"users\", [\"email\"], unique=True)\n    op.create_index(op.f(\"ix_users_id\"), \"users\", [\"id\"], unique=False)\n    op.create_table(\n        \"api_keys\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\n            
\"description\", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=False\n        ),\n        sa.Column(\n            \"hashed_secret\",\n            sqlmodel.sql.sqltypes.AutoString(length=255),\n            nullable=False,\n        ),\n        sa.Column(\n            \"api_key_display\",\n            sqlmodel.sql.sqltypes.AutoString(length=100),\n            nullable=False,\n        ),\n        sa.Column(\"is_active\", sa.Boolean(), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n        sa.UniqueConstraint(\"hashed_secret\"),\n    )\n    op.create_table(\n        \"chats\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.Column(\n            \"title\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False\n        ),\n        sa.Column(\"engine_id\", sa.Integer(), nullable=True),\n        sa.Column(\"engine_options\", sa.JSON(), nullable=True),\n        sa.Column(\"deleted_at\", sa.DateTime(), nullable=True),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"engine_id\"],\n            [\"chat_engines.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_index(op.f(\"ix_chats_id\"), \"chats\", [\"id\"], unique=False)\n    op.create_table(\n        \"chunks\",\n        
sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.Column(\"hash\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),\n        sa.Column(\"text\", sa.Text(), nullable=True),\n        sa.Column(\"meta\", sa.JSON(), nullable=True),\n        sa.Column(\n            \"embedding\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.Column(\"document_id\", sa.Integer(), nullable=True),\n        sa.Column(\"relations\", sa.JSON(), nullable=True),\n        sa.Column(\n            \"source_uri\", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=True\n        ),\n        sa.Column(\n            \"index_status\",\n            sa.Enum(\n                \"NOT_STARTED\",\n                \"PENDING\",\n                \"RUNNING\",\n                \"COMPLETED\",\n                \"FAILED\",\n                name=\"kgindexstatus\",\n            ),\n            nullable=False,\n        ),\n        sa.Column(\"index_result\", sa.Text(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"document_id\"],\n            [\"documents.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_index(op.f(\"ix_chunks_id\"), \"chunks\", [\"id\"], unique=False)\n    op.create_table(\n        \"relationships\",\n        sa.Column(\"description\", sa.Text(), nullable=True),\n        sa.Column(\"meta\", sa.JSON(), nullable=True),\n        sa.Column(\"weight\", sa.Integer(), nullable=False),\n        sa.Column(\"source_entity_id\", sa.Integer(), nullable=False),\n   
     sa.Column(\"target_entity_id\", sa.Integer(), nullable=False),\n        sa.Column(\"last_modified_at\", sa.DateTime(), nullable=True),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\n            \"description_vec\",\n            VectorType(dim=settings.EMBEDDING_DIMS),\n            nullable=True,\n            comment=\"hnsw(distance=cosine)\",\n        ),\n        sa.ForeignKeyConstraint(\n            [\"source_entity_id\"],\n            [\"entities.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"target_entity_id\"],\n            [\"entities.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"user_sessions\",\n        sa.Column(\"token\", sqlmodel.sql.sqltypes.AutoString(length=43), nullable=False),\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"token\"),\n    )\n    op.create_table(\n        \"chat_messages\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"ordinal\", sa.Integer(), nullable=False),\n        sa.Column(\"role\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),\n        sa.Column(\"content\", sa.Text(), nullable=True),\n        sa.Column(\"error\", sa.Text(), nullable=True),\n        sa.Column(\"sources\", 
sa.JSON(), nullable=True),\n        sa.Column(\n            \"trace_url\", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=True\n        ),\n        sa.Column(\"finished_at\", sa.DateTime(), nullable=True),\n        sa.Column(\"chat_id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"chat_id\"],\n            [\"chats.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"feedbacks\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"feedback_type\",\n            sa.Enum(\"LIKE\", \"DISLIKE\", name=\"feedbacktype\"),\n            nullable=False,\n        ),\n        sa.Column(\n            \"comment\", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=False\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"chat_id\", sqlmodel.sql.sqltypes.GUID(), nullable=False),\n        sa.Column(\"chat_message_id\", sa.Integer(), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"chat_id\"],\n            [\"chats.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"chat_message_id\"],\n            [\"chat_messages.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    # ### end Alembic commands 
###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_table(\"feedbacks\")\n    op.drop_table(\"chat_messages\")\n    op.drop_table(\"user_sessions\")\n    op.drop_table(\"relationships\")\n    op.drop_index(op.f(\"ix_chunks_id\"), table_name=\"chunks\")\n    op.drop_table(\"chunks\")\n    op.drop_index(op.f(\"ix_chats_id\"), table_name=\"chats\")\n    op.drop_table(\"chats\")\n    op.drop_table(\"api_keys\")\n    op.drop_index(op.f(\"ix_users_id\"), table_name=\"users\")\n    op.drop_index(op.f(\"ix_users_email\"), table_name=\"users\")\n    op.drop_table(\"users\")\n    op.drop_table(\"staff_action_logs\")\n    op.drop_table(\"site_settings\")\n    op.drop_table(\"semantic_cache\")\n    op.drop_table(\"entities\")\n    op.drop_table(\"documents\")\n    op.drop_table(\"chat_engines\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/749767db5505_add_recommend_questions.py",
    "content": "\"\"\"add recommend questions\n\nRevision ID: 749767db5505\nRevises: 8093333c0d87\nCreate Date: 2024-10-15 16:02:14.203584\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"749767db5505\"\ndown_revision = \"8093333c0d87\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"recommend_questions\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"questions\", sa.JSON(), nullable=True),\n        sa.Column(\"chat_message_id\", sa.Integer(), nullable=False),\n        sa.ForeignKeyConstraint(\n            [\"chat_message_id\"],\n            [\"chat_messages.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_index(\n        op.f(\"ix_recommend_questions_chat_message_id\"),\n        \"recommend_questions\",\n        [\"chat_message_id\"],\n        unique=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_index(\n        op.f(\"ix_recommend_questions_chat_message_id\"), table_name=\"recommend_questions\"\n    )\n    op.drop_table(\"recommend_questions\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/8093333c0d87_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 8093333c0d87\nRevises: 830fd9c44f39\nCreate Date: 2024-09-24 12:23:48.076576\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"8093333c0d87\"\ndown_revision = \"830fd9c44f39\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"chats\", sa.Column(\"visibility\", sa.SmallInteger(), nullable=False))\n    op.execute(\"UPDATE chats SET visibility = 1 WHERE user_id IS NULL;\")\n    op.execute(\n        \"UPDATE chats SET engine_options = CAST(JSON_UNQUOTE(engine_options) AS JSON);\"\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"chats\", \"visibility\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/830fd9c44f39_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 830fd9c44f39\nRevises: dfee070b8abd\nCreate Date: 2024-09-19 13:04:30.351449\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\n\n\n# revision identifiers, used by Alembic.\nrevision = \"830fd9c44f39\"\ndown_revision = \"dfee070b8abd\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\n        \"chats\",\n        sa.Column(\n            \"origin\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=True\n        ),\n    )\n    op.add_column(\n        \"feedbacks\",\n        sa.Column(\n            \"origin\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=True\n        ),\n    )\n    op.add_column(\n        \"chat_messages\",\n        sa.Column(\n            \"post_verification_result_url\",\n            sqlmodel.sql.sqltypes.AutoString(length=512),\n            nullable=True,\n        ),\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"chat_messages\", \"post_verification_result_url\")\n    op.drop_column(\"feedbacks\", \"origin\")\n    op.drop_column(\"chats\", \"origin\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/94b198e20946_.py",
    "content": "\"\"\"empty message\n\nRevision ID: 94b198e20946\nRevises: 2fc10c21bf88\nCreate Date: 2024-07-11 15:19:19.174568\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\n\n\n# revision identifiers, used by Alembic.\nrevision = \"94b198e20946\"\ndown_revision = \"2fc10c21bf88\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"data_sources\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),\n        sa.Column(\n            \"description\", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False\n        ),\n        sa.Column(\n            \"data_source_type\",\n            sqlmodel.sql.sqltypes.AutoString(length=256),\n            nullable=False,\n        ),\n        sa.Column(\"config\", sa.JSON(), nullable=True),\n        sa.Column(\"build_kg_index\", sa.Boolean(), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"uploads\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            
server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),\n        sa.Column(\"size\", sa.Integer(), nullable=False),\n        sa.Column(\"path\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),\n        sa.Column(\n            \"mime_type\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False\n        ),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.add_column(\"documents\", sa.Column(\"data_source_id\", sa.Integer(), nullable=True))\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"documents\", \"data_source_id\")\n    op.drop_table(\"uploads\")\n    op.drop_table(\"data_sources\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/a54f966436ce_evaluation.py",
    "content": "\"\"\"evaluation\n\nRevision ID: a54f966436ce\nRevises: 27a6723b767a\nCreate Date: 2024-12-09 16:46:21.077517\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\n\n# revision identifiers, used by Alembic.\nrevision = \"a54f966436ce\"\ndown_revision = \"27a6723b767a\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"evaluation_datasets\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"evaluation_tasks\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),\n        sa.Column(\"user_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.Column(\"dataset_id\", sa.Integer(), nullable=True),\n        sa.ForeignKeyConstraint(\n            
[\"user_id\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"evaluation_dataset_items\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"query\", sa.Text(), nullable=True),\n        sa.Column(\"reference\", sa.Text(), nullable=True),\n        sa.Column(\"retrieved_contexts\", sa.JSON(), nullable=True),\n        sa.Column(\"extra\", sa.JSON(), nullable=True),\n        sa.Column(\"evaluation_dataset_id\", sa.Integer(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"evaluation_dataset_id\"],\n            [\"evaluation_datasets.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"evaluation_task_items\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\n            \"chat_engine\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False\n        ),\n        sa.Column(\"status\", sa.String(length=32), nullable=False),\n        sa.Column(\"query\", sa.Text(), nullable=True),\n        sa.Column(\"reference\", sa.Text(), nullable=True),\n        sa.Column(\"response\", sa.Text(), nullable=True),\n        sa.Column(\"retrieved_contexts\", sa.JSON(), nullable=True),\n     
   sa.Column(\"extra\", sa.JSON(), nullable=True),\n        sa.Column(\"error_msg\", sa.Text(), nullable=True),\n        sa.Column(\"factual_correctness\", sa.Float(), nullable=True),\n        sa.Column(\"semantic_similarity\", sa.Float(), nullable=True),\n        sa.Column(\"evaluation_task_id\", sa.Integer(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"evaluation_task_id\"],\n            [\"evaluation_tasks.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_table(\"evaluation_task_items\")\n    op.drop_table(\"evaluation_dataset_items\")\n    op.drop_table(\"evaluation_tasks\")\n    op.drop_table(\"evaluation_datasets\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/a8c79553c9f6_.py",
    "content": "\"\"\"empty message\n\nRevision ID: a8c79553c9f6\nRevises: ac6e4d58580d\nCreate Date: 2024-08-05 13:04:17.572821\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"a8c79553c9f6\"\ndown_revision = \"ac6e4d58580d\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"data_sources\", sa.Column(\"llm_id\", sa.Integer(), nullable=True))\n    op.create_foreign_key(None, \"data_sources\", \"llms\", [\"llm_id\"], [\"id\"])\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_constraint(None, \"data_sources\", type_=\"foreignkey\")\n    op.drop_column(\"data_sources\", \"llm_id\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/ac6e4d58580d_.py",
    "content": "\"\"\"empty message\n\nRevision ID: ac6e4d58580d\nRevises: 197bc8be72d1\nCreate Date: 2024-08-01 16:15:59.164348\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"ac6e4d58580d\"\ndown_revision = \"197bc8be72d1\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\n        \"chats\",\n        sa.Column(\n            \"browser_id\", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True\n        ),\n    )\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\"),\n        nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"llms\",\n        \"provider\",\n        existing_type=mysql.ENUM(\"OPENAI\", \"GEMINI\", \"ANTHROPIC_VERTEX\", \"OPENAI_LIKE\"),\n        nullable=True,\n    )\n    op.drop_column(\"chats\", \"browser_id\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/bd17a4ebccc5_.py",
    "content": "\"\"\"empty message\n\nRevision ID: bd17a4ebccc5\nRevises: a8c79553c9f6\nCreate Date: 2024-08-08 01:20:42.069228\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\n\n\n# revision identifiers, used by Alembic.\nrevision = \"bd17a4ebccc5\"\ndown_revision = \"a8c79553c9f6\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"data_sources\", sa.Column(\"deleted_at\", sa.DateTime(), nullable=True))\n    op.drop_index(\"source_uri\", table_name=\"documents\")\n    op.add_column(\n        \"relationships\",\n        sa.Column(\"chunk_id\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n    )\n    op.add_column(\n        \"relationships\",\n        sa.Column(\"document_id\", sa.Integer(), nullable=True),\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"relationships\", \"chunk_id\")\n    op.drop_column(\"relationships\", \"document_id\")\n    op.create_index(\"source_uri\", \"documents\", [\"source_uri\"], unique=True)\n    op.drop_column(\"data_sources\", \"deleted_at\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/c7f016a904c1_.py",
    "content": "\"\"\"empty message\n\nRevision ID: c7f016a904c1\nRevises: 749767db5505\nCreate Date: 2024-10-30 13:28:17.345385\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"c7f016a904c1\"\ndown_revision = \"749767db5505\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"chat_messages\", sa.Column(\"meta\", sa.JSON(), nullable=True))\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.drop_column(\"chat_messages\", \"meta\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/d2ad44deab20_multiple_kb.py",
    "content": "\"\"\"multiple_kb\n\nRevision ID: d2ad44deab20\nRevises: c7f016a904c1\nCreate Date: 2024-11-15 09:51:42.493749\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom sqlalchemy import update\nfrom sqlmodel import Session\nfrom sqlalchemy.dialects import mysql\n\nfrom app.core.config import settings\nfrom app.core.db import engine\nfrom app.models import EmbeddingModel\n\n# revision identifiers, used by Alembic.\nrevision = \"d2ad44deab20\"\ndown_revision = \"c7f016a904c1\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"knowledge_bases\",\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),\n        sa.Column(\"description\", mysql.MEDIUMTEXT(), nullable=True),\n        sa.Column(\"index_methods\", sa.JSON(), nullable=True),\n        sa.Column(\"llm_id\", sa.Integer(), nullable=True),\n        sa.Column(\"embedding_model_id\", sa.Integer(), nullable=True),\n        sa.Column(\"documents_total\", sa.Integer(), nullable=False),\n        sa.Column(\"data_sources_total\", sa.Integer(), nullable=False),\n        sa.Column(\"created_by\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.Column(\n            \"created_at\", sa.DateTime(), server_default=sa.text(\"now()\"), nullable=True\n        ),\n        sa.Column(\"updated_by\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.Column(\n            \"updated_at\", sa.DateTime(), server_default=sa.text(\"now()\"), nullable=True\n        ),\n        sa.Column(\"deleted_by\", sqlmodel.sql.sqltypes.GUID(), nullable=True),\n        sa.Column(\"deleted_at\", sa.DateTime(), nullable=True),\n        sa.ForeignKeyConstraint(\n            [\"created_by\"],\n            [\"users.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            
[\"deleted_by\"],\n            [\"users.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"embedding_model_id\"],\n            [\"embedding_models.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"llm_id\"],\n            [\"llms.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"updated_by\"],\n            [\"users.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.create_table(\n        \"knowledge_base_datasources\",\n        sa.Column(\"knowledge_base_id\", sa.Integer(), nullable=False),\n        sa.Column(\"data_source_id\", sa.Integer(), nullable=False),\n        sa.ForeignKeyConstraint(\n            [\"data_source_id\"],\n            [\"data_sources.id\"],\n        ),\n        sa.ForeignKeyConstraint(\n            [\"knowledge_base_id\"],\n            [\"knowledge_bases.id\"],\n        ),\n        sa.PrimaryKeyConstraint(\"knowledge_base_id\", \"data_source_id\"),\n    )\n    op.add_column(\n        \"documents\", sa.Column(\"knowledge_base_id\", sa.Integer(), nullable=True)\n    )\n    op.create_foreign_key(\n        \"fk_d_on_data_source_id\",\n        \"documents\",\n        \"data_sources\",\n        [\"data_source_id\"],\n        [\"id\"],\n    )\n    op.create_foreign_key(\n        \"fk_d_on_knowledge_base_id\",\n        \"documents\",\n        \"knowledge_bases\",\n        [\"knowledge_base_id\"],\n        [\"id\"],\n    )\n    op.add_column(\n        \"embedding_models\", sa.Column(\"vector_dimension\", sa.Integer(), nullable=False)\n    )\n    # ### end Alembic commands ###\n\n    # ### Data Migration ###\n    with Session(engine) as session:\n        stmt = (\n            update(EmbeddingModel)\n            .where(EmbeddingModel.vector_dimension == 0)\n            .values(vector_dimension=settings.EMBEDDING_DIMS)\n        )\n        session.exec(stmt)\n        session.commit()\n    # ### end Data Migration ###\n\n\ndef downgrade():\n    # ### commands auto generated by 
Alembic - please adjust! ###\n    op.drop_column(\"embedding_models\", \"vector_dimension\")\n    op.drop_constraint(\"fk_d_on_data_source_id\", \"documents\", type_=\"foreignkey\")\n    op.drop_constraint(\"fk_d_on_knowledge_base_id\", \"documents\", type_=\"foreignkey\")\n    op.drop_column(\"documents\", \"knowledge_base_id\")\n    op.drop_table(\"knowledge_base_datasources\")\n    op.drop_table(\"knowledge_bases\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/dfee070b8abd_.py",
    "content": "\"\"\"empty message\n\nRevision ID: dfee070b8abd\nRevises: eb0b85608c0a\nCreate Date: 2024-09-10 10:45:50.318277\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"dfee070b8abd\"\ndown_revision = \"eb0b85608c0a\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.add_column(\"chat_messages\", sa.Column(\"graph_data\", sa.JSON(), nullable=True))\n    op.create_index(\"idx_entity_type\", \"entities\", [\"entity_type\"], unique=False)\n    op.alter_column(\n        \"site_settings\",\n        \"created_at\",\n        existing_type=mysql.DATETIME(fsp=6),\n        nullable=True,\n        existing_server_default=sa.text(\"CURRENT_TIMESTAMP(6)\"),\n    )\n    op.alter_column(\n        \"site_settings\",\n        \"updated_at\",\n        existing_type=mysql.DATETIME(fsp=6),\n        nullable=True,\n        existing_server_default=sa.text(\n            \"CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6)\"\n        ),\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"site_settings\",\n        \"updated_at\",\n        existing_type=mysql.DATETIME(fsp=6),\n        nullable=False,\n        existing_server_default=sa.text(\n            \"CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6)\"\n        ),\n    )\n    op.alter_column(\n        \"site_settings\",\n        \"created_at\",\n        existing_type=mysql.DATETIME(fsp=6),\n        nullable=False,\n        existing_server_default=sa.text(\"CURRENT_TIMESTAMP(6)\"),\n    )\n    op.drop_index(\"idx_entity_type\", table_name=\"entities\")\n    op.drop_column(\"chat_messages\", \"graph_data\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/e32f1e546eec_.py",
    "content": "\"\"\"empty message\n\nRevision ID: e32f1e546eec\nRevises: bd17a4ebccc5\nCreate Date: 2024-08-08 03:55:14.042290\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlmodel.sql.sqltypes\nfrom app.models.base import AESEncryptedColumn\n\n\n# revision identifiers, used by Alembic.\nrevision = \"e32f1e546eec\"\ndown_revision = \"bd17a4ebccc5\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.create_table(\n        \"reranker_models\",\n        sa.Column(\n            \"created_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\n            \"updated_at\",\n            sa.DateTime(timezone=True),\n            server_default=sa.text(\"now()\"),\n            nullable=True,\n        ),\n        sa.Column(\"name\", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),\n        sa.Column(\n            \"provider\",\n            sa.Enum(\"JINA\", \"COHERE\", name=\"rerankerprovider\"),\n            nullable=False,\n        ),\n        sa.Column(\n            \"model\", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False\n        ),\n        sa.Column(\"top_n\", sa.Integer(), nullable=False),\n        sa.Column(\"config\", sa.JSON(), nullable=True),\n        sa.Column(\"is_default\", sa.Boolean(), nullable=False),\n        sa.Column(\"id\", sa.Integer(), nullable=False),\n        sa.Column(\"credentials\", AESEncryptedColumn(), nullable=True),\n        sa.PrimaryKeyConstraint(\"id\"),\n    )\n    op.add_column(\"chat_engines\", sa.Column(\"reranker_id\", sa.Integer(), nullable=True))\n    op.create_foreign_key(\n        None, \"chat_engines\", \"reranker_models\", [\"reranker_id\"], [\"id\"]\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! 
###\n    op.drop_column(\"chat_engines\", \"reranker_id\")\n    op.drop_table(\"reranker_models\")\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/alembic/versions/eb0b85608c0a_.py",
    "content": "\"\"\"empty message\n\nRevision ID: eb0b85608c0a\nRevises: 00534dc350db\nCreate Date: 2024-08-28 15:10:04.219389\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import mysql\n\n# revision identifiers, used by Alembic.\nrevision = \"eb0b85608c0a\"\ndown_revision = \"00534dc350db\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"documents\",\n        \"mime_type\",\n        existing_type=mysql.VARCHAR(length=64),\n        type_=sa.String(length=128),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"uploads\",\n        \"mime_type\",\n        existing_type=mysql.VARCHAR(length=64),\n        type_=sa.String(length=128),\n        existing_nullable=False,\n    )\n    # ### end Alembic commands ###\n\n\ndef downgrade():\n    # ### commands auto generated by Alembic - please adjust! ###\n    op.alter_column(\n        \"uploads\",\n        \"mime_type\",\n        existing_type=sa.String(length=128),\n        type_=mysql.VARCHAR(length=64),\n        existing_nullable=False,\n    )\n    op.alter_column(\n        \"documents\",\n        \"mime_type\",\n        existing_type=sa.String(length=128),\n        type_=mysql.VARCHAR(length=64),\n        existing_nullable=False,\n    )\n    # ### end Alembic commands ###\n"
  },
  {
    "path": "backend/app/api/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/chat/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/chat/routes.py",
    "content": "from typing import Optional\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\n\nfrom app.models.chat import ChatOrigin\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.repositories import chat_repo\n\n\nrouter = APIRouter(\n    prefix=\"/admin/chats\",\n    tags=[\"admin/chats\"],\n)\n\n\n@router.get(\"/origins\")\ndef list_chat_origins(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    search: Optional[str] = None,\n    params: Params = Depends(),\n) -> Page[ChatOrigin]:\n    return chat_repo.list_chat_origins(db_session, search, params)\n"
  },
  {
    "path": "backend/app/api/admin_routes/chat_engine.py",
    "content": "from fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.exceptions import DefaultChatEngineCannotBeDeleted\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.repositories import chat_engine_repo\nfrom app.models import ChatEngine, ChatEngineUpdate\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/chat-engines\")\ndef list_chat_engines(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: Params = Depends(),\n) -> Page[ChatEngine]:\n    return chat_engine_repo.paginate(db_session, params)\n\n\n@router.post(\"/admin/chat-engines\")\ndef create_chat_engine(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    chat_engine: ChatEngine,\n) -> ChatEngine:\n    return chat_engine_repo.create(db_session, chat_engine)\n\n\n@router.get(\"/admin/chat-engines/{chat_engine_id}\")\ndef get_chat_engine(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    chat_engine_id: int,\n) -> ChatEngine:\n    return chat_engine_repo.must_get(db_session, chat_engine_id)\n\n\n@router.put(\"/admin/chat-engines/{chat_engine_id}\")\ndef update_chat_engine(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    chat_engine_id: int,\n    update: ChatEngineUpdate,\n) -> ChatEngine:\n    chat_engine = chat_engine_repo.must_get(db_session, chat_engine_id)\n    return chat_engine_repo.update(db_session, chat_engine, update)\n\n\n@router.delete(\"/admin/chat-engines/{chat_engine_id}\")\ndef delete_chat_engine(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    chat_engine_id: int,\n) -> ChatEngine:\n    chat_engine = chat_engine_repo.must_get(db_session, chat_engine_id)\n    if chat_engine.is_default:\n        raise DefaultChatEngineCannotBeDeleted(chat_engine_id)\n    return chat_engine_repo.delete(db_session, chat_engine)\n\n\n@router.get(\"/admin/chat-engines-default-config\")\ndef get_default_config(\n    
db_session: SessionDep, user: CurrentSuperuserDep\n) -> ChatEngineConfig:\n    return ChatEngineConfig()\n"
  },
  {
    "path": "backend/app/api/admin_routes/document/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/document/routes.py",
    "content": "from typing import Annotated\n\nfrom fastapi import APIRouter, Depends, Query\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.admin_routes.knowledge_base.document.models import (\n    DocumentFilters,\n    DocumentItem,\n)\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.repositories import document_repo\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/documents\")\ndef list_documents(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    filters: Annotated[DocumentFilters, Query()],\n    params: Params = Depends(),\n) -> Page[DocumentItem]:\n    return document_repo.paginate(\n        session=session,\n        filters=filters,\n        params=params,\n    )\n"
  },
  {
    "path": "backend/app/api/admin_routes/embedding_model/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/embedding_model/models.py",
    "content": "from datetime import datetime\nfrom typing import Any\n\nfrom pydantic import BaseModel, field_validator\nfrom typing_extensions import Optional\nfrom app.rag.embeddings.provider import EmbeddingProvider\n\n\nclass EmbeddingModelCreate(BaseModel):\n    name: str\n    provider: EmbeddingProvider\n    model: str\n    vector_dimension: int\n    config: dict | list | None\n    credentials: Any\n    is_default: Optional[bool] = False\n\n    @field_validator(\"vector_dimension\")\n    def vector_dimension_must_gt_1(cls, v: int) -> int:\n        if v <= 0:\n            raise ValueError(\n                \"The vector dimension of the Embedding model should be at least greater than 1.\"\n            )\n        return v\n\n\nclass EmbeddingModelUpdate(BaseModel):\n    name: Optional[str] = None\n    config: Optional[dict | list] = None\n    credentials: Optional[str | dict] = None\n\n\nclass EmbeddingModelItem(BaseModel):\n    id: int\n    name: str\n    provider: EmbeddingProvider\n    model: str\n    vector_dimension: int\n    is_default: bool\n\n\nclass EmbeddingModelDetail(BaseModel):\n    id: int\n    name: str\n    provider: EmbeddingProvider\n    model: str\n    vector_dimension: int\n    config: dict | list | None\n    is_default: bool\n    created_at: datetime\n    updated_at: datetime\n\n\nclass EmbeddingModelTestResult(BaseModel):\n    success: bool\n    error: str = \"\"\n"
  },
  {
    "path": "backend/app/api/admin_routes/embedding_model/routes.py",
    "content": "from typing import List\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.admin_routes.embedding_model.models import (\n    EmbeddingModelItem,\n    EmbeddingModelDetail,\n    EmbeddingModelUpdate,\n    EmbeddingModelTestResult,\n    EmbeddingModelCreate,\n)\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.repositories.embedding_model import embedding_model_repo\nfrom app.rag.embeddings.provider import (\n    EmbeddingProviderOption,\n    embedding_provider_options,\n)\nfrom app.rag.embeddings.resolver import resolve_embed_model\nfrom app.logger import logger\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/embedding-models/providers/options\")\ndef list_embedding_model_provider_options(\n    user: CurrentSuperuserDep,\n) -> List[EmbeddingProviderOption]:\n    return embedding_provider_options\n\n\n@router.get(\"/admin/embedding-models\")\ndef list_embedding_models(\n    db_session: SessionDep, user: CurrentSuperuserDep, params: Params = Depends()\n) -> Page[EmbeddingModelItem]:\n    return embedding_model_repo.paginate(db_session, params)\n\n\n@router.post(\"/admin/embedding-models/test\")\ndef test_embedding_model(\n    user: CurrentSuperuserDep,\n    create: EmbeddingModelCreate,\n) -> EmbeddingModelTestResult:\n    try:\n        embed_model = resolve_embed_model(\n            provider=create.provider,\n            model=create.model,\n            config=create.config,\n            credentials=create.credentials,\n        )\n        embedding = embed_model.get_query_embedding(\"Hello, world!\")\n        expected_length = create.vector_dimension\n        if len(embedding) != expected_length:\n            raise ValueError(\n                f\"Embedding model is configured with {expected_length} dimensions, but got vector embedding with {len(embedding)} dimensions.\"\n            )\n        success = True\n        error = \"\"\n    except Exception as e:\n        
logger.info(f\"Failed to test embedding model: {e}\")\n        success = False\n        error = str(e)\n    return EmbeddingModelTestResult(success=success, error=error)\n\n\n@router.post(\"/admin/embedding-models\")\ndef create_embedding_model(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    create: EmbeddingModelCreate,\n) -> EmbeddingModelDetail:\n    return embedding_model_repo.create(db_session, create)\n\n\n@router.get(\"/admin/embedding-models/{model_id}\")\ndef get_embedding_model_detail(\n    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int\n) -> EmbeddingModelDetail:\n    return embedding_model_repo.must_get(db_session, model_id)\n\n\n@router.put(\"/admin/embedding-models/{model_id}\")\ndef update_embedding_model(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    model_id: int,\n    update: EmbeddingModelUpdate,\n) -> EmbeddingModelDetail:\n    embed_model = embedding_model_repo.must_get(db_session, model_id)\n    return embedding_model_repo.update(db_session, embed_model, update)\n\n\n@router.delete(\"/admin/embedding-models/{model_id}\")\ndef delete_embedding_model(\n    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int\n) -> None:\n    embedding_model = embedding_model_repo.must_get(db_session, model_id)\n    embedding_model_repo.delete(db_session, embedding_model)\n\n\n@router.put(\"/admin/embedding-models/{model_id}/set_default\")\ndef set_default_embedding_model(\n    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int\n) -> EmbeddingModelDetail:\n    embed_model = embedding_model_repo.must_get(db_session, model_id)\n    return embedding_model_repo.set_default(db_session, embed_model)\n"
  },
  {
    "path": "backend/app/api/admin_routes/evaluation/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/evaluation/evaluation_dataset.py",
    "content": "import pandas as pd\nfrom fastapi import APIRouter, status, HTTPException, Depends\nfrom fastapi_pagination import Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlmodel import select, desc\n\nfrom app.api.admin_routes.evaluation.models import (\n    CreateEvaluationDataset,\n    UpdateEvaluationDataset,\n    ModifyEvaluationDatasetItem,\n    ParamsWithKeyword,\n)\nfrom app.api.admin_routes.evaluation.tools import must_get\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.file_storage import default_file_storage\nfrom app.models import Upload, EvaluationDataset, EvaluationDatasetItem\nfrom app.types import MimeTypes\n\nrouter = APIRouter()\n\n\n@router.post(\"/admin/evaluation/datasets\")\ndef create_evaluation_dataset(\n    evaluation_dataset: CreateEvaluationDataset,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n) -> EvaluationDataset:\n    \"\"\"\n    Create a dataset for a given question and chat engine.\n    This API depends on the /admin/uploads API to upload the evaluation data.\n    The evaluation data is expected to be a CSV file with the following columns:\n\n    - query: The query to evaluate\n    - reference: The expected response to the query\n\n    You can add more columns to the CSV file, and the extra columns will adhere to the results.\n\n    Args:\n        evaluation_dataset.name: The name of the evaluation dataset.\n        evaluation_dataset.upload_id: The ID of the uploaded CSV file of the evaluation dataset.\n\n    Returns:\n        True if the evaluation dataset is created successfully.\n    \"\"\"\n    name = evaluation_dataset.name\n    evaluation_data_list = []\n    if evaluation_dataset.upload_id is not None:\n        # If the evaluation_file_id is provided, validate the uploaded file\n        evaluation_file_id = evaluation_dataset.upload_id\n        upload = must_get(session, Upload, evaluation_file_id)\n\n        if upload.mime_type != MimeTypes.CSV:\n            raise 
HTTPException(\n                status_code=status.HTTP_400_BAD_REQUEST,\n                detail=\"The uploaded file must be a CSV file.\",\n            )\n\n        with default_file_storage.open(upload.path) as f:\n            df = pd.read_csv(f)\n\n            # check essential columns\n            must_have_columns = [\"query\", \"reference\"]\n            if not set(must_have_columns).issubset(df.columns):\n                raise HTTPException(\n                    status_code=status.HTTP_400_BAD_REQUEST,\n                    detail=f\"The uploaded file must have the following columns: {must_have_columns}\",\n                )\n\n            eval_list = df.to_dict(orient=\"records\")\n            # create evaluation dataset items\n            evaluation_data_list = [\n                EvaluationDatasetItem(\n                    query=item[\"query\"],\n                    reference=item[\"reference\"],\n                    retrieved_contexts=[],  # TODO: implement this after we can retrieve contexts\n                    extra={k: item[k] for k in item if k not in must_have_columns},\n                )\n                for item in eval_list\n            ]\n\n    evaluation_dataset = EvaluationDataset(\n        name=name,\n        user_id=user.id,\n        evaluation_data_list=evaluation_data_list,\n    )\n\n    session.add(evaluation_dataset)\n    session.commit()\n    session.refresh(evaluation_dataset)\n\n    return evaluation_dataset\n\n\n@router.delete(\"/admin/evaluation/datasets/{evaluation_dataset_id}\")\ndef delete_evaluation_dataset(\n    evaluation_dataset_id: int, session: SessionDep, user: CurrentSuperuserDep\n) -> bool:\n    evaluation_dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)\n\n    session.delete(evaluation_dataset)\n    session.commit()\n\n    return True\n\n\n@router.put(\"/admin/evaluation/datasets/{evaluation_dataset_id}\")\ndef update_evaluation_dataset(\n    evaluation_dataset_id: int,\n    
updated_evaluation_dataset: UpdateEvaluationDataset,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n) -> EvaluationDataset:\n    evaluation_dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)\n\n    evaluation_dataset.name = updated_evaluation_dataset.name\n\n    session.merge(evaluation_dataset)\n    session.commit()\n    session.refresh(evaluation_dataset)\n\n    return evaluation_dataset\n\n\n@router.get(\"/admin/evaluation/datasets\")\ndef list_evaluation_dataset(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: ParamsWithKeyword = Depends(),\n) -> Page[EvaluationDataset]:\n    stmt = select(EvaluationDataset).order_by(desc(EvaluationDataset.id))\n\n    if params.keyword:\n        stmt = stmt.where(EvaluationDataset.name.ilike(f\"%{params.keyword}%\"))\n\n    return paginate(session, stmt, params)\n\n\n@router.post(\"/admin/evaluation/dataset-items\")\ndef create_evaluation_dataset_item(\n    modify_evaluation_dataset_item: ModifyEvaluationDatasetItem,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n) -> EvaluationDatasetItem:\n    evaluation_dataset_item = EvaluationDatasetItem(\n        query=modify_evaluation_dataset_item.query,\n        reference=modify_evaluation_dataset_item.reference,\n        retrieved_contexts=modify_evaluation_dataset_item.retrieved_contexts,\n        extra=modify_evaluation_dataset_item.extra,\n        evaluation_dataset_id=modify_evaluation_dataset_item.evaluation_dataset_id,\n    )\n\n    session.add(evaluation_dataset_item)\n    session.commit()\n    session.refresh(evaluation_dataset_item)\n\n    return evaluation_dataset_item\n\n\n@router.delete(\"/admin/evaluation/dataset-items/{evaluation_dataset_item_id}\")\ndef delete_evaluation_dataset_item(\n    evaluation_dataset_item_id: int, session: SessionDep, user: CurrentSuperuserDep\n) -> bool:\n    evaluation_dataset_item = must_get(\n        session, EvaluationDatasetItem, evaluation_dataset_item_id\n    )\n\n    
session.delete(evaluation_dataset_item)\n    session.commit()\n\n    return True\n\n\n@router.put(\"/admin/evaluation/dataset-items/{evaluation_dataset_item_id}\")\ndef update_evaluation_dataset_item(\n    evaluation_dataset_item_id: int,\n    updated_evaluation_dataset_item: ModifyEvaluationDatasetItem,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n) -> EvaluationDatasetItem:\n    evaluation_dataset_item = must_get(\n        session, EvaluationDatasetItem, evaluation_dataset_item_id\n    )\n\n    evaluation_dataset_item.query = updated_evaluation_dataset_item.query\n    evaluation_dataset_item.reference = updated_evaluation_dataset_item.reference\n    evaluation_dataset_item.retrieved_contexts = (\n        updated_evaluation_dataset_item.retrieved_contexts\n    )\n    evaluation_dataset_item.extra = updated_evaluation_dataset_item.extra\n    evaluation_dataset_item.evaluation_dataset_id = (\n        updated_evaluation_dataset_item.evaluation_dataset_id\n    )\n    session.merge(evaluation_dataset_item)\n    session.commit()\n    session.refresh(evaluation_dataset_item)\n\n    return evaluation_dataset_item\n\n\n@router.get(\"/admin/evaluation/datasets/{evaluation_dataset_id}/dataset-items\")\ndef list_evaluation_dataset_item(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    evaluation_dataset_id: int,\n    params: ParamsWithKeyword = Depends(),\n) -> Page[EvaluationDatasetItem]:\n    stmt = (\n        select(EvaluationDatasetItem)\n        .where(EvaluationDatasetItem.evaluation_dataset_id == evaluation_dataset_id)\n        .order_by(EvaluationDatasetItem.id)\n    )\n\n    if params.keyword:\n        stmt = stmt.where(EvaluationDatasetItem.query.ilike(f\"%{params.keyword}%\"))\n    return paginate(session, stmt, params)\n\n\n@router.get(\"/admin/evaluation/dataset-items/{evaluation_dataset_item_id}\")\ndef get_evaluation_dataset_item(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    evaluation_dataset_item_id: int,\n) -> 
EvaluationDatasetItem:\n    return must_get(session, EvaluationDatasetItem, evaluation_dataset_item_id)\n"
  },
  {
    "path": "backend/app/api/admin_routes/evaluation/evaluation_task.py",
    "content": "import logging\nfrom typing import Optional, List\n\nimport sqlmodel\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlalchemy import func, update\nfrom sqlalchemy.orm import Session\nfrom sqlmodel import select, case, desc\n\nfrom app.api.admin_routes.evaluation.models import (\n    CreateEvaluationTask,\n    EvaluationTaskSummary,\n    ParamsWithKeyword,\n    EvaluationTaskOverview,\n)\nfrom app.api.admin_routes.evaluation.tools import must_get\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.models import (\n    EvaluationTask,\n    EvaluationTaskItem,\n    EvaluationStatus,\n    EvaluationDataset,\n)\nfrom app.tasks.evaluate import add_evaluation_task\n\nrouter = APIRouter()\n\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/admin/evaluation/tasks\")\ndef create_evaluation_task(\n    evaluation_task: CreateEvaluationTask,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n) -> Optional[EvaluationTask]:\n    \"\"\"\n    Create an evaluation task from the evaluation dataset.\n\n    Args:\n        evaluation_task.name: The name of the evaluation task.\n        evaluation_task.evaluation_dataset_id: The ID of the uploaded evaluation dataset.\n        evaluation_task.chat_engine: The chat engine to evaluate the queries against. Default is \"default\".\n        evaluation_task.run_size: The number of queries to evaluate. 
Default is None, which means all queries in the CSV file.\n\n    Returns:\n        The newly created evaluation task.\n    \"\"\"\n\n    name = evaluation_task.name\n    evaluation_dataset_id = evaluation_task.evaluation_dataset_id\n    chat_engine = evaluation_task.chat_engine\n    run_size = evaluation_task.run_size\n\n    dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)\n\n    if run_size is not None and run_size < len(dataset.evaluation_data_list):\n        dataset.evaluation_data_list = dataset.evaluation_data_list[:run_size]\n\n    # create evaluation items\n    # caveat: item fields are copied into new EvaluationTaskItem rows on purpose, so that\n    # later changes to the task items do not modify the original dataset items\n    evaluation_task_items = [\n        EvaluationTaskItem(\n            status=EvaluationStatus.NOT_START,\n            chat_engine=chat_engine,\n            query=item.query,\n            reference=item.reference,\n            retrieved_contexts=item.retrieved_contexts,\n            extra=item.extra,\n        )\n        for item in dataset.evaluation_data_list\n    ]\n\n    evaluation_task = EvaluationTask(\n        name=name,\n        user_id=user.id,\n        evaluation_task_items=evaluation_task_items,\n        dataset_id=evaluation_dataset_id,\n    )\n\n    session.add(evaluation_task)\n    session.commit()\n    session.refresh(evaluation_task)\n\n    add_evaluation_task.delay(evaluation_task.id)\n\n    return evaluation_task\n\n\n@router.delete(\"/admin/evaluation/tasks/{evaluation_task_id}\")\ndef cancel_evaluation_task(\n    evaluation_task_id: int, session: SessionDep, user: CurrentSuperuserDep\n) -> Optional[bool]:\n    must_get(session, EvaluationTask, evaluation_task_id)\n\n    session.exec(\n        update(EvaluationTaskItem)\n        .where(EvaluationTaskItem.evaluation_task_id == evaluation_task_id)\n        .values(status=EvaluationStatus.CANCEL)\n    )\n    session.commit()\n\n    return 
True\n\n\n@router.get(\"/admin/evaluation/tasks/{evaluation_task_id}\")\ndef get_evaluation_task(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    evaluation_task_id: int,\n) -> EvaluationTask:\n    return must_get(session, EvaluationTask, evaluation_task_id)\n\n\n@router.get(\"/admin/evaluation/tasks/{evaluation_task_id}/summary\")\ndef get_evaluation_task_summary(\n    evaluation_task_id: int, session: SessionDep, user: CurrentSuperuserDep\n) -> EvaluationTaskSummary:\n    task = must_get(session, EvaluationTask, evaluation_task_id)\n    return get_summary_for_evaluation_task(task, session)\n\n\n@router.get(\"/admin/evaluation/tasks\")\ndef list_evaluation_task(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: ParamsWithKeyword = Depends(),\n) -> Page[EvaluationTaskSummary]:\n    stmt = select(EvaluationTask).order_by(desc(EvaluationTask.id))\n    if params.keyword:\n        stmt = stmt.where(EvaluationTask.name.ilike(f\"%{params.keyword}%\"))\n\n    task_page: Page[EvaluationTask] = paginate(session, stmt, params)\n    summaries: List[EvaluationTaskSummary] = []\n    for task in task_page.items:\n        summaries.append(get_summary_for_evaluation_task(task, session))\n\n    return Page[EvaluationTaskSummary](\n        items=summaries,\n        total=task_page.total,\n        page=task_page.page,\n        size=task_page.size,\n        pages=task_page.pages,\n    )\n\n\n@router.get(\"/admin/evaluation/tasks/{evaluation_task_id}/items\")\ndef list_evaluation_task_items(\n    evaluation_task_id: int,\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: ParamsWithKeyword = Depends(),\n) -> Page[EvaluationTaskItem]:\n    must_get(session, EvaluationTask, evaluation_task_id)\n    stmt = select(EvaluationTaskItem).where(\n        EvaluationTaskItem.evaluation_task_id == evaluation_task_id\n    )\n    if params.keyword:\n        stmt = stmt.where(\n            sqlmodel.or_(\n                
EvaluationTaskItem.query.ilike(f\"%{params.keyword}%\"),\n                EvaluationTaskItem.reference.ilike(f\"%{params.keyword}%\"),\n            )\n        )\n    stmt.order_by(EvaluationTaskItem.id)\n\n    return paginate(session, stmt, params)\n\n\ndef get_summary_for_evaluation_task(\n    evaluation_task: EvaluationTask, session: Session\n) -> EvaluationTaskSummary:\n    status_counts = (\n        session.query(\n            func.count(\n                case(\n                    (EvaluationTaskItem.status == EvaluationStatus.NOT_START, 1),\n                    else_=None,\n                )\n            ).label(\"not_start\"),\n            func.count(\n                case(\n                    (EvaluationTaskItem.status == EvaluationStatus.EVALUATING, 1),\n                    else_=None,\n                )\n            ).label(\"evaluating\"),\n            func.count(\n                case(\n                    (EvaluationTaskItem.status == EvaluationStatus.DONE, 1), else_=None\n                )\n            ).label(\"done\"),\n            func.count(\n                case(\n                    (EvaluationTaskItem.status == EvaluationStatus.ERROR, 1), else_=None\n                )\n            ).label(\"error\"),\n            func.count(\n                case(\n                    (EvaluationTaskItem.status == EvaluationStatus.CANCEL, 1),\n                    else_=None,\n                )\n            ).label(\"cancel\"),\n        )\n        .filter(EvaluationTaskItem.evaluation_task_id == evaluation_task.id)\n        .one()\n    )\n\n    stats = {}\n    if status_counts.not_start == 0 and status_counts.evaluating == 0:\n        stats_tuple = (\n            session.query(\n                func.avg(EvaluationTaskItem.factual_correctness).label(\n                    \"avg_factual_correctness\"\n                ),\n                func.avg(EvaluationTaskItem.semantic_similarity).label(\n                    \"avg_semantic_similarity\"\n                ),\n    
            func.min(EvaluationTaskItem.factual_correctness).label(\n                    \"min_factual_correctness\"\n                ),\n                func.min(EvaluationTaskItem.semantic_similarity).label(\n                    \"min_semantic_similarity\"\n                ),\n                func.max(EvaluationTaskItem.factual_correctness).label(\n                    \"max_factual_correctness\"\n                ),\n                func.max(EvaluationTaskItem.semantic_similarity).label(\n                    \"max_semantic_similarity\"\n                ),\n                func.stddev(EvaluationTaskItem.factual_correctness).label(\n                    \"std_factual_correctness\"\n                ),\n                func.stddev(EvaluationTaskItem.semantic_similarity).label(\n                    \"std_semantic_similarity\"\n                ),\n            )\n            .filter(\n                EvaluationTaskItem.evaluation_task_id == evaluation_task.id,\n                EvaluationTaskItem.status == EvaluationStatus.DONE,\n                EvaluationTaskItem.factual_correctness.isnot(None),\n                EvaluationTaskItem.semantic_similarity.isnot(None),\n            )\n            .one()\n        )\n\n        stats = dict(stats_tuple._mapping)\n        logger.info(stats)\n\n    return EvaluationTaskSummary(\n        summary=EvaluationTaskOverview(\n            not_start=status_counts.not_start,\n            succeed=status_counts.done,\n            errored=status_counts.error,\n            progressing=status_counts.evaluating,\n            cancel=status_counts.cancel,\n            avg_factual_correctness=stats.get(\"avg_factual_correctness\", 0),\n            avg_semantic_similarity=stats.get(\"avg_semantic_similarity\", 0),\n            min_factual_correctness=stats.get(\"min_factual_correctness\", 0),\n            min_semantic_similarity=stats.get(\"min_semantic_similarity\", 0),\n            max_factual_correctness=stats.get(\"max_factual_correctness\", 0),\n  
          max_semantic_similarity=stats.get(\"max_semantic_similarity\", 0),\n            std_factual_correctness=stats.get(\"std_factual_correctness\", 0),\n            std_semantic_similarity=stats.get(\"std_semantic_similarity\", 0),\n        ),\n        **evaluation_task.model_dump(),\n    )\n"
  },
  {
    "path": "backend/app/api/admin_routes/evaluation/models.py",
    "content": "from typing import Optional\nfrom uuid import UUID\nfrom datetime import datetime\n\nfrom fastapi_pagination import Params\nfrom pydantic import BaseModel\n\n\nclass CreateEvaluationTask(BaseModel):\n    name: str\n    evaluation_dataset_id: int\n    chat_engine: str = \"default\"\n    run_size: Optional[int] = None\n\n\nclass EvaluationTaskOverview(BaseModel):\n    not_start: int\n    succeed: int\n    errored: int\n    progressing: int\n    cancel: int\n    avg_factual_correctness: Optional[float]\n    avg_semantic_similarity: Optional[float]\n    min_factual_correctness: Optional[float]\n    min_semantic_similarity: Optional[float]\n    max_factual_correctness: Optional[float]\n    max_semantic_similarity: Optional[float]\n    std_factual_correctness: Optional[float]\n    std_semantic_similarity: Optional[float]\n\n\nclass EvaluationTaskSummary(BaseModel):\n    id: Optional[int]\n    name: str\n    user_id: UUID\n    dataset_id: int\n    created_at: Optional[datetime]\n    updated_at: Optional[datetime]\n\n    summary: EvaluationTaskOverview\n\n\nclass UpdateEvaluationDataset(BaseModel):\n    name: str\n\n\nclass CreateEvaluationDataset(BaseModel):\n    name: str\n    upload_id: Optional[int] = None\n\n\nclass ModifyEvaluationDatasetItem(BaseModel):\n    query: str\n    reference: str\n    retrieved_contexts: list[str]\n    extra: dict\n    evaluation_dataset_id: int\n\n\nclass ParamsWithKeyword(Params):\n    keyword: Optional[str] = None\n"
  },
  {
    "path": "backend/app/api/admin_routes/evaluation/tools.py",
    "content": "from typing import TypeVar, Type\nfrom fastapi import status, HTTPException\nfrom sqlmodel import SQLModel, Session\n\nT = TypeVar(\"T\", bound=SQLModel)\n\n\ndef must_get(session: Session, model: Type[T], item_id: int) -> T:\n    item = session.get(model, item_id)\n    if not item:\n        raise HTTPException(\n            status_code=status.HTTP_404_NOT_FOUND,\n            detail=f\"{model.__name__} with ID {item_id} not found\",\n        )\n    return item\n\n\ndef must_get_and_belong(\n    session: Session, model: Type[T], item_id: int, user_id: int\n) -> T:\n    item = must_get(session, model, item_id)\n\n    if not hasattr(item, \"user_id\"):\n        raise HTTPException(\n            status_code=status.HTTP_400_BAD_REQUEST,\n            detail=f\"{model.__name__} does not have a 'user_id' field\",\n        )\n\n    if item.user_id != user_id:\n        raise HTTPException(\n            status_code=status.HTTP_403_FORBIDDEN,\n            detail=f\"{model.__name__} with ID {item_id} does not belong to user {user_id}\",\n        )\n\n    return item\n"
  },
  {
    "path": "backend/app/api/admin_routes/feedback.py",
    "content": "from typing import Annotated, Optional\n\nfrom fastapi import APIRouter, Depends, Query\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.models import AdminFeedbackPublic, FeedbackFilters\nfrom app.models.feedback import FeedbackOrigin\nfrom app.repositories import feedback_repo\n\nrouter = APIRouter(\n    prefix=\"/admin/feedbacks\",\n    tags=[\"admin/feedback\"],\n)\n\n\n@router.get(\"/\")\ndef list_feedbacks(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    filters: Annotated[FeedbackFilters, Query()],\n    params: Params = Depends(),\n) -> Page[AdminFeedbackPublic]:\n    return feedback_repo.paginate(\n        session=session,\n        filters=filters,\n        params=params,\n    )\n\n\n@router.get(\"/origins\")\ndef list_feedback_origins(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    search: Optional[str] = None,\n    params: Params = Depends(),\n) -> Page[FeedbackOrigin]:\n    return feedback_repo.list_feedback_origins(session, search, params)\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/chunk/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/chunk/models.py",
    "content": "from pydantic import BaseModel\n\nfrom app.rag.retrievers.chunk.schema import VectorSearchRetrieverConfig\n\n\nclass KBChunkRetrievalConfig(BaseModel):\n    vector_search: VectorSearchRetrieverConfig\n    # TODO: add fulltext and knowledge graph search config\n\n\nclass KBRetrieveChunksRequest(BaseModel):\n    query: str\n    retrieval_config: KBChunkRetrievalConfig\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/chunk/routes.py",
    "content": "import logging\n\nfrom fastapi import APIRouter\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.retrievers.chunk.simple_retriever import (\n    ChunkSimpleRetriever,\n)\nfrom app.rag.retrievers.chunk.schema import ChunksRetrievalResult\n\nfrom app.exceptions import InternalServerError, KBNotFound\nfrom .models import KBRetrieveChunksRequest\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/admin/knowledge_base/{kb_id}/chunks/retrieve\")\ndef retrieve_chunks(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    request: KBRetrieveChunksRequest,\n) -> ChunksRetrievalResult:\n    try:\n        vector_search_config = request.retrieval_config.vector_search\n        retriever = ChunkSimpleRetriever(\n            db_session=db_session,\n            knowledge_base_id=kb_id,\n            config=vector_search_config,\n        )\n        return retriever.retrieve_chunks(\n            request.query,\n        )\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/data_source/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/data_source/models.py",
    "content": "from pydantic import BaseModel, field_validator\n\nfrom app.models import DataSourceType\n\n\nclass KBDataSource(BaseModel):\n    \"\"\"\n    Represents a linked data source for a knowledge base.\n    \"\"\"\n\n    id: int\n    name: str\n    data_source_type: DataSourceType\n    config: dict | list\n\n\nclass KBDataSourceMutable(BaseModel):\n    name: str\n\n    @field_validator(\"name\")\n    def name_must_not_be_blank(cls, v: str) -> str:\n        if not v.strip():\n            raise ValueError(\"Please provide a name for the data source\")\n        return v\n\n\nclass KBDataSourceCreate(KBDataSourceMutable):\n    data_source_type: DataSourceType\n    config: dict | list\n\n\nclass KBDataSourceUpdate(KBDataSourceMutable):\n    pass\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/data_source/routes.py",
    "content": "import logging\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.admin_routes.knowledge_base.data_source.models import (\n    KBDataSourceUpdate,\n    KBDataSource,\n)\nfrom app.api.admin_routes.knowledge_base.models import KBDataSourceCreate\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.exceptions import InternalServerError, KBDataSourceNotFound, KBNotFound\nfrom app.models import DataSource\nfrom app.repositories import knowledge_base_repo\nfrom app.tasks.knowledge_base import (\n    import_documents_from_kb_datasource,\n    purge_kb_datasource_related_resources,\n)\n\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/datasources\")\ndef create_kb_datasource(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    create: KBDataSourceCreate,\n) -> KBDataSource:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        new_data_source = DataSource(\n            name=create.name,\n            description=\"\",\n            data_source_type=create.data_source_type,\n            config=create.config,\n        )\n        new_data_source = knowledge_base_repo.add_kb_datasource(\n            session, kb, new_data_source\n        )\n\n        import_documents_from_kb_datasource.delay(kb_id, new_data_source.id)\n\n        return new_data_source\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.error(\n            f\"Failed to create data source for knowledge base #{kb_id}: {e}\", exc_info=e\n        )\n        raise InternalServerError()\n\n\n@router.put(\"/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}\")\ndef update_kb_datasource(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    data_source_id: int,\n    update: KBDataSourceUpdate,\n) -> KBDataSource:\n    try:\n        kb = 
knowledge_base_repo.must_get(session, kb_id)\n\n        data_source = kb.must_get_data_source_by_id(data_source_id)\n        data_source.name = update.name\n\n        session.add(data_source)\n        session.commit()\n        session.refresh(data_source)\n\n        return data_source\n    except KBNotFound as e:\n        raise e\n    except KBDataSourceNotFound as e:\n        raise e\n    except Exception as e:\n        logger.error(f\"Failed to update data source #{data_source_id}: {e}\", exc_info=e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}\")\ndef get_kb_datasource(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    data_source_id: int,\n) -> KBDataSource:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        return kb.must_get_data_source_by_id(data_source_id)\n    except KBNotFound as e:\n        raise e\n    except KBDataSourceNotFound as e:\n        raise e\n    except Exception as e:\n        logger.error(f\"Failed to get data source #{data_source_id}: {e}\", exc_info=e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/datasources\")\ndef list_kb_datasources(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    params: Params = Depends(),\n) -> Page[KBDataSource]:\n    return knowledge_base_repo.list_kb_datasources(session, kb_id, params)\n\n\n@router.delete(\"/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}\")\ndef remove_kb_datasource(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    data_source_id: int,\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        data_source = kb.must_get_data_source_by_id(data_source_id)\n\n        # Flag the data source to be deleted, it will be deleted completely by the background job.\n        knowledge_base_repo.remove_kb_datasource(session, kb, data_source)\n      
  session.commit()\n\n        purge_kb_datasource_related_resources.apply_async(\n            args=[kb_id, data_source_id], countdown=5\n        )\n\n        return {\"detail\": \"success\"}\n    except KBNotFound as e:\n        raise e\n    except KBDataSourceNotFound as e:\n        raise e\n    except Exception as e:\n        logger.error(\n            f\"Failed to remove data source #{data_source_id} from knowledge base #{kb_id}: {e}\",\n            exc_info=e,\n        )\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/document/models.py",
    "content": "from datetime import datetime\nfrom typing import Optional\nfrom uuid import UUID\n\nfrom pydantic import BaseModel, Field\n\nfrom app.api.admin_routes.models import DataSourceDescriptor, KnowledgeBaseDescriptor\nfrom app.models import DocIndexTaskStatus\nfrom app.types import MimeTypes\n\n\nclass DocumentFilters(BaseModel):\n    search: Optional[str] = Field(\n        description=\"The search string to filter documents by name or source URI.\",\n        default=None,\n    )\n    knowledge_base_id: Optional[int] = Field(\n        description=\"The knowledge base ID that the document belongs to.\",\n        default=None,\n    )\n    data_source_id: Optional[int] = Field(\n        description=\"The data source ID that the document belongs to.\",\n        default=None,\n    )\n    mime_type: Optional[MimeTypes] = Field(\n        description=\"The MIME type of the documents to filter by.\",\n        default=None,\n    )\n    index_status: Optional[DocIndexTaskStatus] = Field(\n        description=\"The status of the document index task to filter by.\",\n        default=None,\n    )\n    created_at: Optional[tuple[datetime, datetime]] = Field(\n        description=\"The time range when the document was created.\",\n        default=None,\n    )\n    updated_at: Optional[tuple[datetime, datetime]] = Field(\n        description=\"The time range when the document was last updated.\",\n        default=None,\n    )\n    last_modified_at: Optional[tuple[datetime, datetime]] = Field(\n        description=\"The time range when the document was last modified in the source system.\",\n        default=None,\n    )\n\n\nclass DocumentItem(BaseModel):\n    id: int\n    hash: str\n    name: str\n    content: str\n    mime_type: MimeTypes | None\n    source_uri: str | None\n    meta: dict | list | None\n    index_status: DocIndexTaskStatus | None\n    index_result: str | None\n    data_source: DataSourceDescriptor | None\n    knowledge_base: KnowledgeBaseDescriptor | 
None\n    last_modified_at: datetime\n    created_at: datetime\n    updated_at: datetime\n\n\nclass RebuildIndexResult(BaseModel):\n    reindex_document_ids: list[int] = Field(default_factory=list)\n    ignore_document_ids: list[int] = Field(default_factory=list)\n    reindex_chunk_ids: list[UUID] = Field(default_factory=list)\n    ignore_chunk_ids: list[UUID] = Field(default_factory=list)\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/document/routes.py",
    "content": "import logging\nfrom typing import Annotated\n\nfrom fastapi import APIRouter, Depends, Query, HTTPException\nfrom fastapi_pagination import Params, Page\nfrom sqlmodel import Session\n\nfrom app.api.admin_routes.knowledge_base.models import ChunkItem\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.models import Document\nfrom app.models.chunk import KgIndexStatus, get_kb_chunk_model\nfrom app.models.document import DocIndexTaskStatus\nfrom app.models.entity import get_kb_entity_model\nfrom app.models.relationship import get_kb_relationship_model\nfrom app.repositories import knowledge_base_repo, document_repo\nfrom app.repositories.chunk import ChunkRepo\nfrom app.api.admin_routes.knowledge_base.document.models import (\n    DocumentFilters,\n    DocumentItem,\n    RebuildIndexResult,\n)\nfrom app.exceptions import InternalServerError\nfrom app.repositories.graph import GraphRepo\nfrom app.tasks.build_index import build_index_for_document, build_kg_index_for_chunk\nfrom app.tasks.knowledge_base import stats_for_knowledge_base\n\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/documents\")\ndef list_kb_documents(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    filters: Annotated[DocumentFilters, Query()],\n    params: Params = Depends(),\n) -> Page[DocumentItem]:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        filters.knowledge_base_id = kb.id\n        return document_repo.paginate(\n            session=session,\n            filters=filters,\n            params=params,\n        )\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/documents/{doc_id}\")\ndef get_kb_document_by_id(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    doc_id: int,\n) 
-> Document:\n    try:\n        document = document_repo.must_get(session, doc_id)\n        assert document.knowledge_base_id == kb_id\n        return document\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/documents/{doc_id}/chunks\")\ndef list_kb_document_chunks(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    doc_id: int,\n) -> list[ChunkItem]:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        chunk_repo = ChunkRepo(get_kb_chunk_model(kb))\n        return chunk_repo.get_document_chunks(session, doc_id)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.delete(\"/admin/knowledge_bases/{kb_id}/documents/{document_id}\")\ndef remove_kb_document(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    document_id: int,\n) -> RebuildIndexResult:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        doc = document_repo.must_get(session, document_id)\n        assert doc.knowledge_base_id == kb.id\n\n        chunk_model = get_kb_chunk_model(kb)\n        entity_model = get_kb_entity_model(kb)\n        relationship_model = get_kb_relationship_model(kb)\n\n        chunk_repo = ChunkRepo(chunk_model)\n        graph_repo = GraphRepo(entity_model, relationship_model, chunk_model)\n\n        graph_repo.delete_document_relationships(session, document_id)\n        logger.info(\n            f\"Deleted relationships generated by document #{document_id} successfully.\"\n        )\n\n        graph_repo.delete_orphaned_entities(session)\n        logger.info(\"Deleted orphaned entities successfully.\")\n\n        chunk_repo.delete_by_document(session, document_id)\n        logger.info(f\"Deleted chunks of document #{document_id} 
successfully.\")\n\n        session.delete(doc)\n        session.commit()\n\n        stats_for_knowledge_base.delay(kb_id)\n\n        return {\"detail\": \"success\"}\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(f\"Failed to remove document #{document_id}: {e}\")\n        raise InternalServerError()\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/documents/reindex\")\ndef rebuild_kb_documents_index(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    document_ids: list[int],\n    reindex_completed_task: bool = False,\n):\n    try:\n        return rebuild_kb_document_index_by_ids(\n            session, kb_id, document_ids, reindex_completed_task\n        )\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/documents/{doc_id}/reindex\")\ndef rebuild_kb_document_index(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    doc_id: int,\n    reindex_completed_task: bool = False,\n) -> RebuildIndexResult:\n    try:\n        document_ids = [doc_id]\n        return rebuild_kb_document_index_by_ids(\n            db_session, kb_id, document_ids, reindex_completed_task\n        )\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\ndef rebuild_kb_document_index_by_ids(\n    db_session: Session,\n    kb_id: int,\n    document_ids: list[int],\n    reindex_completed_task: bool = False,\n) -> RebuildIndexResult:\n    kb = knowledge_base_repo.must_get(db_session, kb_id)\n    kb_chunk_repo = ChunkRepo(get_kb_chunk_model(kb))\n\n    # Retry failed vector index tasks.\n    documents = document_repo.fetch_by_ids(db_session, document_ids)\n    reindex_document_ids = []\n    ignore_document_ids = []\n\n    for doc in 
documents:\n        # TODO: check NOT_STARTED, PENDING, RUNNING\n        if doc.index_status != DocIndexTaskStatus.FAILED and not reindex_completed_task:\n            ignore_document_ids.append(doc.id)\n        else:\n            reindex_document_ids.append(doc.id)\n\n        doc.index_status = DocIndexTaskStatus.PENDING\n        db_session.add(doc)\n        db_session.commit()\n\n        build_index_for_document.delay(kb.id, doc.id)\n\n    # Retry failed kg index tasks.\n    chunks = kb_chunk_repo.fetch_by_document_ids(db_session, document_ids)\n    reindex_chunk_ids = []\n    ignore_chunk_ids = []\n    for chunk in chunks:\n        if chunk.index_status == KgIndexStatus.COMPLETED and not reindex_completed_task:\n            ignore_chunk_ids.append(chunk.id)\n            continue\n        else:\n            reindex_chunk_ids.append(chunk.id)\n\n        chunk.index_status = KgIndexStatus.PENDING\n        db_session.add(chunk)\n        db_session.commit()\n\n        build_kg_index_for_chunk.delay(kb.id, chunk.id)\n\n    return RebuildIndexResult(\n        reindex_document_ids=reindex_document_ids,\n        ignore_document_ids=ignore_document_ids,\n        reindex_chunk_ids=reindex_chunk_ids,\n        ignore_chunk_ids=ignore_chunk_ids,\n    )\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/graph/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/graph/knowledge/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/graph/knowledge/routes.py",
    "content": "from fastapi import HTTPException\nfrom starlette import status\n\nfrom app.api.admin_routes.knowledge_base.graph.models import (\n    KnowledgeRequest,\n    KnowledgeNeighborRequest,\n    KnowledgeChunkRequest,\n)\nfrom app.api.admin_routes.knowledge_base.graph.routes import router, logger\nfrom app.api.deps import SessionDep\nfrom app.exceptions import KBNotFound, InternalServerError\nfrom app.rag.knowledge_base.index_store import get_kb_tidb_graph_store\nfrom app.repositories import knowledge_base_repo\n\n\n# Experimental interface\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/knowledge\")\ndef retrieve_knowledge(session: SessionDep, kb_id: int, request: KnowledgeRequest):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        data = graph_store.retrieve_graph_data(\n            request.query,\n            request.top_k,\n            request.similarity_threshold,\n        )\n        return {\n            \"entities\": data[\"entities\"],\n            \"relationships\": data[\"relationships\"],\n        }\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/knowledge/neighbors\")\ndef retrieve_knowledge_neighbors(\n    session: SessionDep, kb_id: int, request: KnowledgeNeighborRequest\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        data = graph_store.retrieve_neighbors(\n            request.entities_ids,\n            request.query,\n            request.max_depth,\n            request.max_neighbors,\n            request.similarity_threshold,\n        )\n        return data\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise 
InternalServerError()\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/knowledge/chunks\")\ndef retrieve_knowledge_chunks(\n    session: SessionDep, kb_id: int, request: KnowledgeChunkRequest\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        data = graph_store.get_chunks_by_relationships(request.relationships_ids)\n        if not data:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"No chunks found for the given relationships\",\n            )\n        return data\n    except KBNotFound as e:\n        raise e\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/graph/models.py",
    "content": "from typing import List, Optional\nfrom pydantic import BaseModel, model_validator\n\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrieverConfig,\n)\n\n\nclass SynopsisEntityCreate(BaseModel):\n    name: str\n    description: str\n    topic: str\n    meta: dict\n    entities: List[int]\n\n    @model_validator(mode=\"after\")\n    def validate_entities(self):\n        if len(self.entities) == 0:\n            raise ValueError(\"Entities list should not be empty\")\n        return self\n\n\nclass EntityUpdate(BaseModel):\n    name: Optional[str] = None\n    description: Optional[str] = None\n    meta: Optional[dict] = None\n\n\nclass RelationshipUpdate(BaseModel):\n    description: Optional[str] = None\n    meta: Optional[dict] = None\n    weight: Optional[int] = None\n\n\nclass GraphSearchRequest(BaseModel):\n    query: str\n    include_meta: bool = True\n    depth: int = 2\n    with_degree: bool = True\n    relationship_meta_filters: dict = {}\n\n\n# Knowledge Graph Retrieval\n\n\nclass KBKnowledgeGraphRetrievalConfig(BaseModel):\n    knowledge_graph: KnowledgeGraphRetrieverConfig\n\n\nclass KBRetrieveKnowledgeGraphRequest(BaseModel):\n    query: str\n    llm_id: int\n    retrieval_config: KBKnowledgeGraphRetrievalConfig\n\n\n### Experimental\n\n\nclass KnowledgeRequest(BaseModel):\n    query: str\n    similarity_threshold: float = 0.55\n    top_k: int = 10\n\n\nclass KnowledgeNeighborRequest(BaseModel):\n    entities_ids: List[int]\n    query: str\n    max_depth: int = 1\n    max_neighbors: int = 20\n    similarity_threshold: float = 0.55\n\n\nclass KnowledgeChunkRequest(BaseModel):\n    relationships_ids: List[int]\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/graph/routes.py",
    "content": "import logging\nfrom typing import List\nimport json\n\nfrom fastapi import APIRouter, HTTPException, status\nfrom fastapi.responses import StreamingResponse\nfrom fastapi.encoders import jsonable_encoder\n\nfrom app.api.admin_routes.knowledge_base.graph.models import (\n    SynopsisEntityCreate,\n    EntityUpdate,\n    RelationshipUpdate,\n    KBRetrieveKnowledgeGraphRequest,\n    GraphSearchRequest,\n)\nfrom app.api.deps import SessionDep\nfrom app.exceptions import KBNotFound, InternalServerError\nfrom app.models import (\n    EntityPublic,\n    RelationshipPublic,\n)\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrievalResult,\n)\nfrom app.rag.knowledge_base.index_store import (\n    get_kb_tidb_graph_editor,\n    get_kb_tidb_graph_store,\n)\nfrom app.rag.retrievers.knowledge_graph.simple_retriever import (\n    KnowledgeGraphSimpleRetriever,\n)\nfrom app.repositories import knowledge_base_repo\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.get(\n    \"/admin/knowledge_bases/{kb_id}/graph/entities/search\",\n    response_model=List[EntityPublic],\n)\ndef search_similar_entities(\n    session: SessionDep, kb_id: int, query: str, top_k: int = 10\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        return tidb_graph_editor.search_similar_entities(session, query, top_k)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.post(\n    \"/admin/knowledge_bases/{kb_id}/graph/entities/synopsis\",\n    response_model=EntityPublic,\n)\ndef create_synopsis_entity(\n    session: SessionDep, kb_id: int, request: SynopsisEntityCreate\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        return 
tidb_graph_editor.create_synopsis_entity(\n            session,\n            request.name,\n            request.description,\n            request.topic,\n            request.meta,\n            request.entities,\n        )\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.get(\n    \"/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}\",\n    response_model=EntityPublic,\n)\ndef get_entity(session: SessionDep, kb_id: int, entity_id: int):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        entity = tidb_graph_editor.get_entity(session, entity_id)\n        if not entity:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"Entity not found\",\n            )\n        return entity\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.put(\n    \"/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}\",\n    response_model=EntityPublic,\n)\ndef update_entity(\n    session: SessionDep, kb_id: int, entity_id: int, entity_update: EntityUpdate\n):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        old_entity = tidb_graph_editor.get_entity(session, entity_id)\n        if old_entity is None:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"Entity not found\",\n            )\n        entity = tidb_graph_editor.update_entity(\n            session, old_entity, entity_update.model_dump()\n        )\n        return entity\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise 
e\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}/subgraph\")\ndef get_entity_subgraph(session: SessionDep, kb_id: int, entity_id: int) -> dict:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        entity = tidb_graph_editor.get_entity(session, entity_id)\n        if entity is None:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"Entity not found\",\n            )\n        relationships, entities = tidb_graph_editor.get_entity_subgraph(session, entity)\n        return {\n            \"relationships\": relationships,\n            \"entities\": entities,\n        }\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\n    \"/admin/knowledge_bases/{kb_id}/graph/relationships/{relationship_id}\",\n    response_model=RelationshipPublic,\n)\ndef get_relationship(session: SessionDep, kb_id: int, relationship_id: int):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        relationship = tidb_graph_editor.get_relationship(session, relationship_id)\n        if relationship is None:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"Relationship not found\",\n            )\n        return relationship\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.put(\n    \"/admin/knowledge_bases/{kb_id}/graph/relationships/{relationship_id}\",\n    response_model=RelationshipPublic,\n)\ndef update_relationship(\n    session: SessionDep,\n    kb_id: int,\n    relationship_id: int,\n    relationship_update: RelationshipUpdate,\n):\n    try:\n        kb 
= knowledge_base_repo.must_get(session, kb_id)\n        tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)\n        old_relationship = tidb_graph_editor.get_relationship(session, relationship_id)\n        if old_relationship is None:\n            raise HTTPException(\n                status_code=status.HTTP_404_NOT_FOUND,\n                detail=\"Relationship not found\",\n            )\n        relationship = tidb_graph_editor.update_relationship(\n            session, old_relationship, relationship_update.model_dump()\n        )\n        return relationship\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/retrieve\")\ndef retrieve_kb_knowledge_graph(\n    db_session: SessionDep, kb_id: int, request: KBRetrieveKnowledgeGraphRequest\n) -> KnowledgeGraphRetrievalResult:\n    try:\n        retriever = KnowledgeGraphSimpleRetriever(\n            db_session=db_session,\n            knowledge_base_id=kb_id,\n            config=request.retrieval_config.knowledge_graph,\n        )\n        knowledge_graph = retriever.retrieve_knowledge_graph(request.query)\n        return KnowledgeGraphRetrievalResult(\n            entities=knowledge_graph.entities,\n            relationships=knowledge_graph.relationships,\n        )\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/search\", deprecated=True)\ndef legacy_search_graph(session: SessionDep, kb_id: int, request: GraphSearchRequest):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        entities, relationships = graph_store.retrieve_with_weight(\n            request.query,\n            [],\n            request.depth,\n            request.include_meta,\n       
     request.with_degree,\n            request.relationship_meta_filters,\n        )\n        return {\n            \"entities\": entities,\n            \"relationships\": relationships,\n        }\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/graph/entire_graph\")\ndef get_entire_knowledge_graph(session: SessionDep, kb_id: int):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        retrieved_kg = graph_store.get_entire_knowledge_graph()\n        return {\n            \"entities\": retrieved_kg.entities,\n            \"relationships\": retrieved_kg.relationships,\n        }\n\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        # TODO: throw InternalServerError\n        raise e\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/graph/entire_graph/stream\")\ndef stream_entire_knowledge_graph(session: SessionDep, kb_id: int):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        graph_store = get_kb_tidb_graph_store(session, kb)\n        \n        def generate():\n            for chunk in graph_store.stream_entire_knowledge_graph(chunk_size=5000):\n                yield f\"data: {json.dumps(jsonable_encoder(chunk))}\\n\\n\"\n            yield f\"data: {json.dumps({'type': 'complete'})}\\n\\n\"\n        \n        return StreamingResponse(\n            generate(),\n            media_type=\"text/event-stream\",\n            headers={\n                \"Cache-Control\": \"no-cache\",\n                \"Connection\": \"keep-alive\",\n                \"Access-Control-Allow-Origin\": \"*\",\n            }\n        )\n\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/models.py",
    "content": "from datetime import datetime\nfrom typing import Optional\nfrom uuid import UUID\nfrom pydantic import BaseModel, field_validator, Field\n\nfrom app.api.admin_routes.knowledge_base.data_source.models import (\n    KBDataSource,\n    KBDataSourceCreate,\n)\nfrom app.api.admin_routes.models import (\n    EmbeddingModelDescriptor,\n    LLMDescriptor,\n    UserDescriptor,\n)\nfrom app.exceptions import KBNoVectorIndexConfigured\nfrom app.models import KgIndexStatus\nfrom app.models.knowledge_base import IndexMethod, GeneralChunkingConfig, ChunkingConfig\n\n\nclass KnowledgeBaseCreate(BaseModel):\n    name: str\n    description: Optional[str] = None\n    index_methods: list[IndexMethod] = Field(\n        default_factory=lambda: [IndexMethod.VECTOR]\n    )\n    llm_id: Optional[int] = None\n    embedding_model_id: Optional[int] = None\n    chunking_config: ChunkingConfig = Field(default_factory=GeneralChunkingConfig)\n    data_sources: list[KBDataSourceCreate] = Field(default_factory=list)\n\n    @field_validator(\"name\")\n    def name_must_not_be_blank(cls, v: str) -> str:\n        if not v.strip():\n            raise ValueError(\"Please provide a name for the knowledge base\")\n        return v\n\n    @field_validator(\"index_methods\")\n    def index_methods_must_has_vector(cls, v: list[IndexMethod]) -> list[IndexMethod]:\n        # Notice: For now, knowledge base must be configured vector index method,\n        # we will remove this limit in the feature.\n        if IndexMethod.VECTOR not in v:\n            raise KBNoVectorIndexConfigured()\n        return v\n\n\nclass KnowledgeBaseUpdate(BaseModel):\n    name: Optional[str] = None\n    description: Optional[str] = None\n    chunking_config: Optional[ChunkingConfig] = None\n\n\nclass KnowledgeBaseDetail(BaseModel):\n    \"\"\"\n    Represents a detailed view of a knowledge base.\n    \"\"\"\n\n    id: int\n    name: str\n    description: Optional[str] = None\n    documents_total: int\n    
data_sources_total: int\n    # Notice: By default, SQLModel will not serialize list type relationships.\n    # https://github.com/fastapi/sqlmodel/issues/37#issuecomment-2093607242\n    data_sources: list[KBDataSource]\n    chunking_config: Optional[ChunkingConfig] = None\n    index_methods: list[IndexMethod]\n    llm_id: int | None = None\n    llm: LLMDescriptor | None = None\n    embedding_model_id: int | None = None\n    embedding_model: EmbeddingModelDescriptor | None = None\n    creator: UserDescriptor | None = None\n    created_at: datetime | None = None\n    updated_at: datetime | None = None\n\n\nclass KnowledgeBaseItem(BaseModel):\n    \"\"\"\n    Represents a simplified view of a knowledge base for list display purposes.\n    \"\"\"\n\n    id: int\n    name: str\n    description: Optional[str] = None\n    documents_total: int\n    data_sources_total: int\n    index_methods: list[IndexMethod]\n    creator: UserDescriptor | None = None\n    created_at: datetime\n    updated_at: datetime\n\n\nclass VectorIndexError(BaseModel):\n    document_id: int\n    document_name: str\n    source_uri: str\n    error: str | None = None\n\n\nclass KGIndexError(BaseModel):\n    document_id: int\n    document_name: str\n    source_uri: str\n    chunk_id: UUID\n    error: str | None = None\n\n\nclass ChunkItem(BaseModel):\n    id: UUID\n    document_id: int\n    hash: str\n    text: str\n    meta: Optional[dict | list]\n    embedding: Optional[list[float]]\n    relations: Optional[dict | list]\n    source_uri: Optional[str]\n    index_status: Optional[KgIndexStatus]\n    index_result: Optional[str]\n    created_at: Optional[datetime]\n    updated_at: Optional[datetime]\n\n\nclass RetrievalRequest(BaseModel):\n    query: str\n    chat_engine: str = \"default\"\n    top_k: Optional[int] = 5\n"
  },
  {
    "path": "backend/app/api/admin_routes/knowledge_base/routes.py",
    "content": "import logging\n\nfrom fastapi import APIRouter, Depends, HTTPException\nfrom fastapi_pagination import Params, Page\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.knowledge_base.index_store import (\n    init_kb_tidb_vector_store,\n    init_kb_tidb_graph_store,\n)\nfrom .models import (\n    KnowledgeBaseDetail,\n    KnowledgeBaseItem,\n    KnowledgeBaseCreate,\n    KnowledgeBaseUpdate,\n    VectorIndexError,\n    KGIndexError,\n)\nfrom app.exceptions import (\n    InternalServerError,\n    KBIsUsedByChatEngines,\n)\nfrom app.models import (\n    DataSource,\n    KnowledgeBase,\n)\nfrom app.repositories import (\n    embedding_model_repo,\n    llm_repo,\n    data_source_repo,\n    knowledge_base_repo,\n)\nfrom app.tasks import (\n    build_kg_index_for_chunk,\n    build_index_for_document,\n)\nfrom app.tasks.knowledge_base import (\n    import_documents_for_knowledge_base,\n    stats_for_knowledge_base,\n    purge_knowledge_base_related_resources,\n)\nfrom ..models import ChatEngineDescriptor\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/admin/knowledge_bases\")\ndef create_knowledge_base(\n    session: SessionDep, user: CurrentSuperuserDep, create: KnowledgeBaseCreate\n) -> KnowledgeBaseDetail:\n    try:\n        data_sources = [\n            data_source_repo.create(\n                session,\n                DataSource(\n                    name=data_source.name,\n                    description=\"\",\n                    user_id=user.id,\n                    data_source_type=data_source.data_source_type,\n                    config=data_source.config,\n                ),\n            )\n            for data_source in create.data_sources\n        ]\n\n        if not create.llm_id:\n            create.llm_id = llm_repo.must_get_default(session).id\n\n        if not create.embedding_model_id:\n            create.embedding_model_id = embedding_model_repo.must_get_default(\n                
session\n            ).id\n\n        knowledge_base = KnowledgeBase(\n            name=create.name,\n            description=create.description,\n            index_methods=create.index_methods,\n            llm_id=create.llm_id,\n            embedding_model_id=create.embedding_model_id,\n            chunking_config=create.chunking_config.model_dump(),\n            data_sources=data_sources,\n            created_by=user.id,\n            updated_by=user.id,\n        )\n        knowledge_base = knowledge_base_repo.create(session, knowledge_base)\n\n        # Ensure the knowledge-base corresponding table schema are initialized.\n        init_kb_tidb_vector_store(session, knowledge_base)\n        init_kb_tidb_graph_store(session, knowledge_base)\n\n        # Trigger import and index documents for knowledge base\n        import_documents_for_knowledge_base.delay(knowledge_base.id)\n\n        return knowledge_base\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases\")\ndef list_knowledge_bases(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: Params = Depends(),\n) -> Page[KnowledgeBaseItem]:\n    return knowledge_base_repo.paginate(session, params)\n\n\n@router.get(\"/admin/knowledge_bases/{knowledge_base_id}\")\ndef get_knowledge_base(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    knowledge_base_id: int,\n) -> KnowledgeBaseDetail:\n    try:\n        return knowledge_base_repo.must_get(session, knowledge_base_id)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.put(\"/admin/knowledge_bases/{knowledge_base_id}\")\ndef update_knowledge_base_setting(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    knowledge_base_id: int,\n    update: KnowledgeBaseUpdate,\n) -> KnowledgeBaseDetail:\n    try:\n        
knowledge_base = knowledge_base_repo.must_get(session, knowledge_base_id)\n        knowledge_base = knowledge_base_repo.update(session, knowledge_base, update)\n        return knowledge_base\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/linked_chat_engines\")\ndef list_kb_linked_chat_engines(\n    session: SessionDep, user: CurrentSuperuserDep, kb_id: int\n) -> list[ChatEngineDescriptor]:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        return knowledge_base_repo.list_linked_chat_engines(session, kb.id)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.delete(\"/admin/knowledge_bases/{kb_id}\")\ndef delete_knowledge_base(session: SessionDep, user: CurrentSuperuserDep, kb_id: int):\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n\n        # Check if the knowledge base has linked chat engines.\n        linked_chat_engines = knowledge_base_repo.list_linked_chat_engines(\n            session, kb.id\n        )\n        if len(linked_chat_engines) > 0:\n            raise KBIsUsedByChatEngines(kb_id, len(linked_chat_engines))\n\n        # Delete knowledge base.\n        knowledge_base_repo.delete(session, kb)\n\n        # Trigger purge knowledge base related resources after 5 seconds.\n        purge_knowledge_base_related_resources.apply_async(args=[kb_id], countdown=5)\n\n        return {\"detail\": f\"Knowledge base #{kb_id} is deleted successfully\"}\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{knowledge_base_id}/overview\")\ndef get_knowledge_base_index_overview(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    knowledge_base_id: int,\n) 
-> dict:\n    try:\n        knowledge_base = knowledge_base_repo.must_get(session, knowledge_base_id)\n\n        stats_for_knowledge_base.delay(knowledge_base.id)\n\n        return knowledge_base_repo.get_index_overview(session, knowledge_base)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/vector-index-errors\")\ndef list_kb_vector_index_errors(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    params: Params = Depends(),\n) -> Page[VectorIndexError]:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        return knowledge_base_repo.list_vector_index_built_errors(session, kb, params)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/knowledge_bases/{kb_id}/kg-index-errors\")\ndef list_kb_kg_index_errors(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n    params: Params = Depends(),\n) -> Page[KGIndexError]:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n        return knowledge_base_repo.list_kg_index_built_errors(session, kb, params)\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.post(\"/admin/knowledge_bases/{kb_id}/retry-failed-index-tasks\")\ndef retry_failed_tasks(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    kb_id: int,\n) -> dict:\n    try:\n        kb = knowledge_base_repo.must_get(session, kb_id)\n\n        # Retry failed vector index tasks.\n        document_ids = knowledge_base_repo.set_failed_documents_status_to_pending(\n            session, kb\n        )\n        for document_id in document_ids:\n            build_index_for_document.delay(kb_id, document_id)\n        
logger.info(f\"Triggered {len(document_ids)} documents to rebuilt vector index.\")\n\n        # Retry failed kg index tasks.\n        chunk_ids = knowledge_base_repo.set_failed_chunks_status_to_pending(session, kb)\n        for chunk_id in chunk_ids:\n            build_kg_index_for_chunk.delay(kb_id, chunk_id)\n        logger.info(\n            f\"Triggered {len(chunk_ids)} chunks to rebuilt knowledge graph index.\"\n        )\n\n        return {\n            \"detail\": f\"Triggered reindex {len(document_ids)} documents and {len(chunk_ids)} chunks of knowledge base #{kb_id}.\",\n            \"reindex_document_ids\": document_ids,\n            \"reindex_chunk_ids\": chunk_ids,\n        }\n    except HTTPException:\n        raise\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/admin_routes/langfuse.py",
    "content": "import logging\nfrom pydantic import BaseModel\n\nfrom fastapi import APIRouter\nfrom langfuse import Langfuse\n\nfrom app.api.deps import CurrentSuperuserDep\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\nclass LangfuseSetting(BaseModel):\n    host: str = \"https://us.cloud.langfuse.com\"\n    public_key: str\n    secret_key: str\n\n\nclass LangfuseTestResult(BaseModel):\n    success: bool\n    error: str = \"\"\n\n\n@router.post(\"/admin/langfuse/test\")\ndef test_langfuse(\n    user: CurrentSuperuserDep,\n    request: LangfuseSetting,\n) -> LangfuseTestResult:\n    try:\n        lf = Langfuse(\n            host=request.host,\n            secret_key=request.secret_key,\n            public_key=request.public_key,\n        )\n        success = lf.auth_check()\n        if not success:\n            error = \"Langfuse authentication failed, please check public_key, secret_key and host.\"\n        else:\n            error = \"\"\n    except Exception as e:\n        success = False\n        error = str(e)\n    return LangfuseTestResult(success=success, error=error)\n"
  },
  {
    "path": "backend/app/api/admin_routes/legacy_retrieve.py",
    "content": "import logging\nfrom typing import Optional, List\n\nfrom fastapi import APIRouter\nfrom sqlmodel import Session\nfrom app.models import Document\nfrom app.api.admin_routes.models import ChatEngineBasedRetrieveRequest\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom llama_index.core.schema import NodeWithScore\n\nfrom app.exceptions import InternalServerError, KBNotFound\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.rag.chat.retrieve.retrieve_flow import RetrieveFlow\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\ndef get_override_engine_config(\n    db_session: Session,\n    engine_name: str,\n    # Override chat engine config.\n    top_k: Optional[int] = None,\n    similarity_top_k: Optional[int] = None,\n    oversampling_factor: Optional[int] = None,\n    refine_question_with_kg: Optional[bool] = None,\n) -> ChatEngineConfig:\n    engine_config = ChatEngineConfig.load_from_db(db_session, engine_name)\n    if similarity_top_k is not None:\n        engine_config.vector_search.similarity_top_k = similarity_top_k\n    if oversampling_factor is not None:\n        engine_config.vector_search.oversampling_factor = oversampling_factor\n    if top_k is not None:\n        engine_config.vector_search.top_k = top_k\n    if refine_question_with_kg is not None:\n        engine_config.refine_question_with_kg = refine_question_with_kg\n    return engine_config\n\n\n@router.get(\"/admin/retrieve/documents\", deprecated=True)\ndef legacy_retrieve_documents(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    question: str,\n    chat_engine: str = \"default\",\n    # Override chat engine config.\n    top_k: Optional[int] = 5,\n    similarity_top_k: Optional[int] = None,\n    oversampling_factor: Optional[int] = 5,\n    refine_question_with_kg: Optional[bool] = True,\n) -> List[Document]:\n    try:\n        engine_config = get_override_engine_config(\n            db_session=session,\n            
engine_name=chat_engine,\n            top_k=top_k,\n            similarity_top_k=similarity_top_k,\n            oversampling_factor=oversampling_factor,\n            refine_question_with_kg=refine_question_with_kg,\n        )\n        retriever = RetrieveFlow(\n            db_session=session,\n            engine_name=chat_engine,\n            engine_config=engine_config,\n        )\n        return retriever.retrieve_documents(question)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/admin/embedding_retrieve\", deprecated=True)\ndef legacy_retrieve_chunks(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    question: str,\n    chat_engine: str = \"default\",\n    # Override chat engine config.\n    top_k: Optional[int] = 5,\n    similarity_top_k: Optional[int] = None,\n    oversampling_factor: Optional[int] = 5,\n    refine_question_with_kg=False,\n) -> List[NodeWithScore]:\n    try:\n        engine_config = get_override_engine_config(\n            db_session=session,\n            engine_name=chat_engine,\n            top_k=top_k,\n            similarity_top_k=similarity_top_k,\n            oversampling_factor=oversampling_factor,\n            refine_question_with_kg=refine_question_with_kg,\n        )\n        retriever = RetrieveFlow(\n            db_session=session,\n            engine_name=chat_engine,\n            engine_config=engine_config,\n        )\n        return retriever.retrieve(question)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.post(\"/admin/embedding_retrieve\", deprecated=True)\ndef legacy_retrieve_chunks_2(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    request: ChatEngineBasedRetrieveRequest,\n) -> List[NodeWithScore]:\n    try:\n        engine_config = get_override_engine_config(\n            
db_session=session,\n            engine_name=request.chat_engine,\n            top_k=request.top_k,\n            similarity_top_k=request.similarity_top_k,\n            oversampling_factor=request.oversampling_factor,\n            refine_question_with_kg=request.refine_question_with_kg,\n        )\n        retriever = RetrieveFlow(\n            db_session=session,\n            engine_name=request.chat_engine,\n            engine_config=engine_config,\n        )\n        return retriever.retrieve(request.query)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/admin_routes/llm/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/llm/routes.py",
    "content": "from typing import List\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\nfrom llama_index.core.base.llms.types import ChatMessage\nfrom pydantic import BaseModel\n\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.logger import logger\nfrom app.models import AdminLLM, LLM, LLMUpdate\nfrom app.rag.llms.provider import LLMProviderOption, llm_provider_options\nfrom app.rag.llms.resolver import resolve_llm\nfrom app.repositories.llm import llm_repo\n\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/llms/providers/options\")\ndef list_llm_provider_options(user: CurrentSuperuserDep) -> List[LLMProviderOption]:\n    return llm_provider_options\n\n\n@router.get(\"/admin/llms\")\ndef list_llms(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: Params = Depends(),\n) -> Page[AdminLLM]:\n    return llm_repo.paginate(db_session, params)\n\n\nclass LLMTestResult(BaseModel):\n    success: bool\n    error: str = \"\"\n\n\n@router.post(\"/admin/llms/test\")\ndef test_llm(\n    db_llm: LLM,\n    user: CurrentSuperuserDep,\n) -> LLMTestResult:\n    try:\n        llm = resolve_llm(\n            provider=db_llm.provider,\n            model=db_llm.model,\n            config=db_llm.config,\n            credentials=db_llm.credentials,\n        )\n        llm.chat([ChatMessage(role=\"user\", content=\"Who are you?\")])\n\n        # Test with dspy LM.\n        import dspy\n        from app.rag.llms.dspy import get_dspy_lm_by_llama_llm\n\n        dspy_lm = get_dspy_lm_by_llama_llm(llm)\n        with dspy.context(lm=dspy_lm):\n            math = dspy.Predict(\"question -> answer: float\")\n            prediction = math(question=\"1 + 1 = ?\")\n            assert prediction.answer == 2\n\n        success = True\n        error = \"\"\n    except Exception as e:\n        logger.info(f\"Failed to test LLM: {e}\")\n        success = False\n        error = str(e)\n    return 
LLMTestResult(success=success, error=error)\n\n\n@router.post(\"/admin/llms\")\ndef create_llm(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    llm: LLM,\n) -> AdminLLM:\n    return llm_repo.create(db_session, llm)\n\n\n@router.get(\"/admin/llms/{llm_id}\")\ndef get_llm(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    llm_id: int,\n) -> AdminLLM:\n    return llm_repo.must_get(db_session, llm_id)\n\n\n@router.put(\"/admin/llms/{llm_id}\")\ndef update_llm(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    llm_id: int,\n    llm_update: LLMUpdate,\n) -> AdminLLM:\n    llm = llm_repo.must_get(db_session, llm_id)\n    return llm_repo.update(db_session, llm, llm_update)\n\n\n@router.delete(\"/admin/llms/{llm_id}\")\ndef delete_llm(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    llm_id: int,\n) -> None:\n    llm = llm_repo.must_get(db_session, llm_id)\n    llm_repo.delete(db_session, llm)\n\n\n@router.put(\"/admin/llms/{llm_id}/set_default\")\ndef set_default_llm(\n    db_session: SessionDep, user: CurrentSuperuserDep, llm_id: int\n) -> AdminLLM:\n    llm = llm_repo.must_get(db_session, llm_id)\n    return llm_repo.set_default(db_session, llm)\n"
  },
  {
    "path": "backend/app/api/admin_routes/models.py",
    "content": "from uuid import UUID\nfrom typing import Optional\nfrom pydantic import BaseModel\n\nfrom app.api.admin_routes.embedding_model.models import EmbeddingModelItem\nfrom app.rag.llms.provider import LLMProvider\n\n\nclass LLMDescriptor(BaseModel):\n    id: int\n    name: str\n    provider: LLMProvider\n    model: str\n    is_default: bool\n\n\nclass EmbeddingModelDescriptor(EmbeddingModelItem):\n    pass\n\n\nclass UserDescriptor(BaseModel):\n    id: UUID\n    email: str\n\n\nclass KnowledgeBaseDescriptor(BaseModel):\n    id: int\n    name: str\n\n    def __hash__(self):\n        return hash(self.id)\n\n\nclass DataSourceDescriptor(BaseModel):\n    id: int\n    name: str\n\n\nclass ChatEngineDescriptor(BaseModel):\n    id: int\n    name: str\n    is_default: bool\n\n\nclass ChatEngineBasedRetrieveRequest(BaseModel):\n    query: str\n    chat_engine: Optional[str] = \"default\"\n    top_k: Optional[int] = 5\n    similarity_top_k: Optional[int] = None\n    oversampling_factor: Optional[int] = 5\n    refine_question_with_kg: Optional[bool] = False\n"
  },
  {
    "path": "backend/app/api/admin_routes/reranker_model/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/admin_routes/reranker_model/routes.py",
    "content": "from typing import List\n\nfrom fastapi import Depends, APIRouter\nfrom fastapi_pagination import Params, Page\nfrom pydantic import BaseModel\nfrom llama_index.core.schema import NodeWithScore, TextNode\n\nfrom app.api.admin_routes.llm.routes import LLMTestResult\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.models import RerankerModel, AdminRerankerModel\nfrom app.models.reranker_model import RerankerModelUpdate\nfrom app.repositories.reranker_model import reranker_model_repo\nfrom app.rag.rerankers.provider import RerankerProviderOption, reranker_provider_options\nfrom app.rag.rerankers.resolver import resolve_reranker\n\nfrom app.logger import logger\n\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/reranker-models/providers/options\")\ndef list_reranker_model_provider_options(\n    user: CurrentSuperuserDep,\n) -> List[RerankerProviderOption]:\n    return reranker_provider_options\n\n\n@router.get(\"/admin/reranker-models\")\ndef list_reranker_models(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    params: Params = Depends(),\n) -> Page[AdminRerankerModel]:\n    return reranker_model_repo.paginate(db_session, params)\n\n\nclass RerankerModelTestResult(BaseModel):\n    success: bool\n    error: str = \"\"\n\n\n@router.post(\"/admin/reranker-models/test\")\ndef test_reranker_model(\n    db_reranker_model: RerankerModel, user: CurrentSuperuserDep\n) -> RerankerModelTestResult:\n    try:\n        reranker = resolve_reranker(\n            provider=db_reranker_model.provider,\n            model=db_reranker_model.model,\n            # for testing purpose, we only rerank 2 nodes\n            top_n=2,\n            config=db_reranker_model.config,\n            credentials=db_reranker_model.credentials,\n        )\n        reranked_nodes = reranker.postprocess_nodes(\n            nodes=[\n                NodeWithScore(\n                    node=TextNode(\n                        text=\"TiDB is a distributed SQL 
database.\",\n                    ),\n                    score=0.8,\n                ),\n                NodeWithScore(\n                    node=TextNode(\n                        text=\"TiKV is a distributed key-value storage engine.\",\n                    ),\n                    score=0.6,\n                ),\n                NodeWithScore(\n                    node=TextNode(\n                        text=\"TiFlash is a columnar storage engine.\",\n                    ),\n                    score=0.4,\n                ),\n            ],\n            query_str=\"What is TiDB?\",\n        )\n        if len(reranked_nodes) != 2:\n            raise ValueError(f\"expected 2 nodes, but got {len(reranked_nodes)}\")\n        success = True\n        error = \"\"\n    except Exception as e:\n        logger.info(f\"Failed to test reranker model: {e}\")\n        success = False\n        error = str(e)\n    return RerankerModelTestResult(success=success, error=error)\n\n\n@router.post(\"/admin/reranker-models\")\ndef create_reranker_model(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    reranker_model: RerankerModel,\n) -> AdminRerankerModel:\n    return reranker_model_repo.create(db_session, reranker_model)\n\n\n@router.get(\"/admin/reranker-models/{model_id}\")\ndef get_reranker_model(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    model_id: int,\n) -> AdminRerankerModel:\n    return reranker_model_repo.must_get(db_session, model_id)\n\n\n@router.put(\"/admin/reranker-models/{model_id}\")\ndef update_reranker_model(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    model_id: int,\n    model_update: RerankerModelUpdate,\n) -> AdminRerankerModel:\n    reranker_model = reranker_model_repo.must_get(db_session, model_id)\n    return reranker_model_repo.update(db_session, reranker_model, model_update)\n\n\n@router.delete(\"/admin/reranker-models/{model_id}\")\ndef delete_reranker_model(\n    db_session: SessionDep,\n    
user: CurrentSuperuserDep,\n    model_id: int,\n) -> None:\n    reranker_model = reranker_model_repo.must_get(db_session, model_id)\n    reranker_model_repo.delete(db_session, reranker_model)\n\n\n@router.put(\"/admin/reranker-models/{model_id}/set_default\")\ndef set_default_reranker_model(\n    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int\n) -> AdminRerankerModel:\n    reranker_model = reranker_model_repo.must_get(db_session, model_id)\n    return reranker_model_repo.set_default(db_session, reranker_model)\n"
  },
  {
    "path": "backend/app/api/admin_routes/semantic_cache.py",
    "content": "from typing import Optional, Dict\nimport time\nimport logging\n\nfrom fastapi import APIRouter, Body\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.rag.semantic_cache import SemanticCacheManager, SemanticItem\n\nrouter = APIRouter()\n\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/admin/semantic_cache\")\nasync def add_semantic_cache(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    question: str,\n    answer: str,\n    namespace: str = \"default\",\n    chat_engine: str = \"default\",\n    metadata: Optional[dict] = Body(None),\n) -> Dict:\n    chat_engine_config = ChatEngineConfig.load_from_db(session, chat_engine)\n    _dspy_lm = chat_engine_config.get_dspy_lm(session)\n\n    scm = SemanticCacheManager(\n        dspy_llm=_dspy_lm,\n    )\n\n    try:\n        scm.add_cache(\n            session,\n            item=SemanticItem(question=question, answer=answer),\n            namespace=namespace,\n            metadata=metadata,\n        )\n    except Exception as e:\n        return {\n            \"status\": \"failed\",\n            \"message\": str(e),\n        }\n\n    return {\n        \"status\": \"success\",\n    }\n\n\n@router.get(\"/admin/semantic_cache\")\nasync def search_semantic_cache(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    query: str,\n    namespace: str = \"default\",\n    chat_engine: str = \"default\",\n) -> Dict:\n    start_time = time.time()\n    chat_engine_config = ChatEngineConfig.load_from_db(session, chat_engine)\n    _dspy_lm = chat_engine_config.get_dspy_lm(session)\n    logger.debug(\n        f\"[search_semantic_cache] Loading dspy_lm took {time.time() - start_time:.2f} seconds\"\n    )\n\n    scm = SemanticCacheManager(\n        dspy_llm=_dspy_lm,\n    )\n\n    start_time = time.time()\n    response = scm.search(\n        session=session,\n        query=query,\n        namespace=namespace,\n    )\n    
logger.debug(\n        f\"[search_semantic_cache] Searching semantic cache took {time.time() - start_time:.2f} seconds\"\n    )\n    return response\n"
  },
  {
    "path": "backend/app/api/admin_routes/site_setting.py",
    "content": "from typing import Dict\nfrom pydantic import BaseModel\nfrom http import HTTPStatus\nfrom fastapi import APIRouter, HTTPException\n\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.site_settings import SiteSetting, SettingValue, SettingType\n\nrouter = APIRouter()\n\n\n@router.get(\"/admin/site-settings\", response_model=Dict[str, SettingValue])\ndef site_settings(user: CurrentSuperuserDep):\n    return SiteSetting.get_all_settings(force_check_db_cache=True)\n\n\nclass SettingUpdate(BaseModel):\n    value: SettingType\n\n\n@router.put(\n    \"/admin/site-settings/{setting_name}\",\n    status_code=HTTPStatus.NO_CONTENT,\n    responses={\n        HTTPStatus.BAD_REQUEST: {\n            \"content\": {\n                \"application/json\": {\n                    \"examples\": {\n                        \"invalid_data_type\": {\n                            \"summary\": \"Invalid data type\",\n                            \"value\": {\"detail\": \"title must be of type `str`\"},\n                        },\n                    }\n                }\n            },\n        },\n        HTTPStatus.NOT_FOUND: {\n            \"content\": {\n                \"application/json\": {\n                    \"examples\": {\n                        \"setting_not_found\": {\n                            \"summary\": \"Setting not found\",\n                            \"value\": {\"detail\": \"Setting not found\"},\n                        },\n                    }\n                }\n            },\n        },\n    },\n)\ndef update_site_setting(\n    session: SessionDep,\n    user: CurrentSuperuserDep,\n    setting_name: str,\n    request: SettingUpdate,\n):\n    if not SiteSetting.setting_exists(setting_name):\n        raise HTTPException(\n            status_code=HTTPStatus.NOT_FOUND, detail=\"Setting not found\"\n        )\n\n    try:\n        SiteSetting.update_setting(session, setting_name, request.value)\n    except ValueError as e:\n        
raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail=str(e))\n"
  },
  {
    "path": "backend/app/api/admin_routes/stats.py",
    "content": "from datetime import date\nfrom pydantic import BaseModel\nfrom fastapi import APIRouter\nfrom app.api.deps import CurrentSuperuserDep, SessionDep\nfrom app.repositories import chat_repo\n\n\nrouter = APIRouter()\n\n\nclass DateRangeStats(BaseModel):\n    start_date: date\n    end_date: date\n\n\nclass ChatStats(DateRangeStats):\n    values: list\n\n\n@router.get(\"/admin/stats/trend/chat-user\")\ndef chat_count_trend(\n    session: SessionDep, user: CurrentSuperuserDep, start_date: date, end_date: date\n) -> ChatStats:\n    stats = chat_repo.chat_trend_by_user(session, start_date, end_date)\n    return ChatStats(start_date=start_date, end_date=end_date, values=stats)\n\n\n@router.get(\"/admin/stats/trend/chat-origin\")\ndef chat_origin_trend(\n    session: SessionDep, user: CurrentSuperuserDep, start_date: date, end_date: date\n) -> ChatStats:\n    stats = chat_repo.chat_trend_by_origin(session, start_date, end_date)\n    return ChatStats(start_date=start_date, end_date=end_date, values=stats)\n"
  },
  {
    "path": "backend/app/api/admin_routes/upload.py",
    "content": "import os\nimport time\nfrom typing import List\nfrom fastapi import APIRouter, UploadFile, HTTPException, status\n\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.file_storage import default_file_storage\nfrom app.utils.uuid6 import uuid7\nfrom app.models import Upload\nfrom app.types import MimeTypes\nfrom app.site_settings import SiteSetting\n\nrouter = APIRouter()\n\n\nSUPPORTED_FILE_TYPES = {\n    \".txt\": MimeTypes.PLAIN_TXT,\n    \".md\": MimeTypes.MARKDOWN,\n    \".pdf\": MimeTypes.PDF,\n    \".docx\": MimeTypes.DOCX,\n    \".pptx\": MimeTypes.PPTX,\n    \".xlsx\": MimeTypes.XLSX,\n    \".csv\": MimeTypes.CSV,\n}\n\n\n@router.post(\"/admin/uploads\")\ndef upload_files(\n    session: SessionDep, user: CurrentSuperuserDep, files: List[UploadFile]\n) -> List[Upload]:\n    uploads = []\n    for file in files:\n        if not file.filename:\n            raise HTTPException(\n                status_code=status.HTTP_400_BAD_REQUEST,\n                detail=\"File name cannot be empty\",\n            )\n        sys_max_upload_file_size = SiteSetting.max_upload_file_size\n        if file.size > sys_max_upload_file_size:\n            upload_file_size_in_mb = file.size / 1024 / 1024\n            max_upload_file_size_in_mb = sys_max_upload_file_size / 1024 / 1024\n            raise HTTPException(\n                status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,\n                detail=\"The upload file size ({:.2f} MiB) exceeds maximum allowed size ({:.2f} MiB)\".format(\n                    upload_file_size_in_mb, max_upload_file_size_in_mb\n                ),\n            )\n\n        file_ext = os.path.splitext(file.filename)[1].lower()\n        if file_ext not in SUPPORTED_FILE_TYPES:\n            raise HTTPException(\n                status_code=status.HTTP_400_BAD_REQUEST,\n                detail=f\"File type {file_ext} not supported. 
Supported types: {SUPPORTED_FILE_TYPES.keys()}\",\n            )\n        file_path = f\"uploads/{user.id.hex}/{int(time.time())}-{uuid7().hex}{file_ext}\"\n        default_file_storage.save(file_path, file.file)\n        uploads.append(\n            Upload(\n                name=file.filename,\n                size=default_file_storage.size(file_path),\n                path=file_path,\n                mime_type=SUPPORTED_FILE_TYPES[file_ext],\n                user_id=user.id,\n            )\n        )\n    session.add_all(uploads)\n    session.commit()\n    return uploads\n"
  },
  {
    "path": "backend/app/api/admin_routes/user.py",
    "content": "from typing import Optional\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\n\nfrom app.repositories.user import user_repo\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.api.admin_routes.models import (\n    UserDescriptor,\n)\n\nrouter = APIRouter(\n    prefix=\"/admin/users\",\n    tags=[\"admin/users\"],\n)\n\n\n@router.get(\"/search\")\ndef search_users(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    search: Optional[str] = None,\n    params: Params = Depends(),\n) -> Page[UserDescriptor]:\n    return user_repo.search_users(db_session, search, params)\n"
  },
  {
    "path": "backend/app/api/deps.py",
    "content": "from typing import Annotated\nfrom fastapi import Depends\nfrom sqlmodel import Session\nfrom sqlmodel.ext.asyncio.session import AsyncSession\n\nfrom app.core.db import get_db_session, get_db_async_session\nfrom app.models import User\nfrom app.auth.users import (\n    current_user,\n    current_superuser,\n    optional_current_user,\n)\n\n\nSessionDep = Annotated[Session, Depends(get_db_session)]\nAsyncSessionDep = Annotated[AsyncSession, Depends(get_db_async_session)]\n\n# Dependency for current user, it will return None if user is not authenticated\nOptionalUserDep = Annotated[User | None, Depends(optional_current_user)]\n\n# Dependencies for current user and superuser, it will return 401 if user is not authenticated\nCurrentUserDep = Annotated[User, Depends(current_user)]\nCurrentSuperuserDep = Annotated[User, Depends(current_superuser)]\n"
  },
  {
    "path": "backend/app/api/main.py",
    "content": "from fastapi import APIRouter\nfrom app.api.routes import (\n    chat_engine,\n    index,\n    chat,\n    user,\n    api_key,\n    feedback,\n    document,\n)\nfrom app.api.admin_routes.knowledge_base.routes import (\n    router as admin_knowledge_base_router,\n)\nfrom app.api.admin_routes.knowledge_base.graph.routes import (\n    router as admin_kb_graph_router,\n)\nfrom app.api.admin_routes.knowledge_base.graph.knowledge.routes import (\n    router as admin_kb_graph_knowledge_router,\n)\nfrom app.api.admin_routes.knowledge_base.data_source.routes import (\n    router as admin_kb_data_source_router,\n)\nfrom app.api.admin_routes.knowledge_base.document.routes import (\n    router as admin_kb_document_router,\n)\nfrom app.api.admin_routes.knowledge_base.chunk.routes import (\n    router as admin_kb_chunk_router,\n)\nfrom app.api.admin_routes.document.routes import router as admin_document_router\nfrom app.api.admin_routes.llm.routes import router as admin_llm_router\nfrom app.api.admin_routes.embedding_model.routes import (\n    router as admin_embedding_model_router,\n)\nfrom app.api.admin_routes.reranker_model.routes import (\n    router as admin_reranker_model_router,\n)\nfrom app.api.admin_routes.chat.routes import router as admin_user_router\nfrom app.api.admin_routes import (\n    chat_engine as admin_chat_engine,\n    feedback as admin_feedback,\n    legacy_retrieve as admin_legacy_retrieve,\n    site_setting as admin_site_settings,\n    upload as admin_upload,\n    stats as admin_stats,\n    semantic_cache as admin_semantic_cache,\n    langfuse as admin_langfuse,\n    user as admin_user,\n)\nfrom app.api.admin_routes.evaluation import (\n    evaluation_task as admin_evaluation_task,\n    evaluation_dataset as admin_evaluation_dataset,\n)\nfrom app.api.routes.retrieve import (\n    routes as retrieve_routes,\n)\n\nfrom app.auth.users import auth_backend, fastapi_users\n\napi_router = APIRouter()\napi_router.include_router(index.router, 
tags=[\"index\"])\napi_router.include_router(chat.router, tags=[\"chat\"])\napi_router.include_router(feedback.router, tags=[\"chat\"])\napi_router.include_router(user.router, tags=[\"user\"])\napi_router.include_router(api_key.router, tags=[\"auth\"])\napi_router.include_router(document.router, tags=[\"documents\"])\napi_router.include_router(chat_engine.router, tags=[\"chat-engines\"])\napi_router.include_router(retrieve_routes.router, tags=[\"retrieve\"])\napi_router.include_router(admin_user_router)\napi_router.include_router(admin_chat_engine.router, tags=[\"admin/chat-engines\"])\napi_router.include_router(admin_document_router, tags=[\"admin/documents\"])\napi_router.include_router(admin_feedback.router)\napi_router.include_router(admin_site_settings.router, tags=[\"admin/site_settings\"])\napi_router.include_router(admin_upload.router, tags=[\"admin/upload\"])\napi_router.include_router(admin_knowledge_base_router, tags=[\"admin/knowledge_base\"])\napi_router.include_router(admin_kb_graph_router, tags=[\"admin/knowledge_base/graph\"])\napi_router.include_router(\n    admin_kb_graph_knowledge_router, tags=[\"admin/knowledge_base/graph/knowledge\"]\n)\napi_router.include_router(\n    admin_kb_data_source_router, tags=[\"admin/knowledge_base/data_source\"]\n)\napi_router.include_router(\n    admin_kb_document_router, tags=[\"admin/knowledge_base/document\"]\n)\napi_router.include_router(admin_kb_chunk_router, tags=[\"admin/knowledge_base/chunk\"])\napi_router.include_router(admin_llm_router, tags=[\"admin/llm\"])\napi_router.include_router(admin_embedding_model_router, tags=[\"admin/embedding_model\"])\napi_router.include_router(admin_reranker_model_router, tags=[\"admin/reranker_model\"])\napi_router.include_router(admin_langfuse.router, tags=[\"admin/langfuse\"])\napi_router.include_router(admin_legacy_retrieve.router, tags=[\"admin/retrieve_old\"])\napi_router.include_router(admin_stats.router, 
tags=[\"admin/stats\"])\napi_router.include_router(admin_semantic_cache.router, tags=[\"admin/semantic_cache\"])\napi_router.include_router(admin_evaluation_task.router, tags=[\"admin/evaluation/task\"])\n\napi_router.include_router(\n    admin_evaluation_dataset.router, tags=[\"admin/evaluation/dataset\"]\n)\napi_router.include_router(admin_user.router)\n\napi_router.include_router(\n    fastapi_users.get_auth_router(auth_backend), prefix=\"/auth\", tags=[\"auth\"]\n)\n"
  },
  {
    "path": "backend/app/api/routes/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/routes/api_key.py",
    "content": "from fastapi import APIRouter, Depends\nfrom pydantic import BaseModel\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.deps import AsyncSessionDep, CurrentSuperuserDep\nfrom app.auth.api_keys import api_key_manager\nfrom app.models import PublicApiKey\n\nrouter = APIRouter()\n\n\nclass CreateApiKeyRequest(BaseModel):\n    description: str\n\n\nclass CreateApiKeyResponse(BaseModel):\n    api_key: str\n\n\n@router.post(\"/api-keys\")\nasync def create_api_key(\n    session: AsyncSessionDep, user: CurrentSuperuserDep, request: CreateApiKeyRequest\n) -> CreateApiKeyResponse:\n    _, raw_api_key = await api_key_manager.acreate_api_key(\n        session, user, request.description\n    )\n    return CreateApiKeyResponse(api_key=raw_api_key)\n\n\n@router.get(\"/api-keys\")\nasync def list_api_keys(\n    session: AsyncSessionDep,\n    user: CurrentSuperuserDep,\n    params: Params = Depends(),\n) -> Page[PublicApiKey]:\n    return await api_key_manager.list_api_keys(session, user, params)\n\n\n@router.delete(\"/api-keys/{api_key_id}\")\nasync def delete_api_key(\n    session: AsyncSessionDep, user: CurrentSuperuserDep, api_key_id: int\n):\n    return await api_key_manager.delete_api_key(session, user, api_key_id)\n"
  },
  {
    "path": "backend/app/api/routes/chat.py",
    "content": "import logging\nfrom uuid import UUID\nfrom typing import List, Optional, Annotated\nfrom http import HTTPStatus\n\nfrom pydantic import (\n    BaseModel,\n    field_validator,\n)\nfrom fastapi import APIRouter, Depends, HTTPException, Request, Query\nfrom fastapi.responses import StreamingResponse\nfrom fastapi_pagination import Params, Page\nfrom llama_index.core.base.llms.types import ChatMessage, MessageRole\n\nfrom app.api.deps import SessionDep, OptionalUserDep, CurrentUserDep\nfrom app.rag.chat.chat_flow import ChatFlow\nfrom app.rag.retrievers.knowledge_graph.schema import KnowledgeGraphRetrievalResult\nfrom app.repositories import chat_repo\nfrom app.models import Chat, ChatUpdate\n\nfrom app.rag.chat.chat_service import get_final_chat_result\nfrom app.models import Chat, ChatUpdate, ChatFilters\nfrom app.rag.chat.chat_service import (\n    user_can_view_chat,\n    user_can_edit_chat,\n    get_chat_message_subgraph,\n    get_chat_message_recommend_questions,\n    remove_chat_message_recommend_questions,\n)\nfrom app.exceptions import InternalServerError\n\nlogger = logging.getLogger(__name__)\n\nrouter = APIRouter()\n\n\nclass ChatRequest(BaseModel):\n    messages: List[ChatMessage]\n    chat_engine: str = \"default\"\n    chat_id: Optional[UUID] = None\n    stream: bool = True\n\n    @field_validator(\"messages\")\n    @classmethod\n    def check_messages(cls, messages: List[ChatMessage]) -> List[ChatMessage]:\n        if not messages:\n            raise ValueError(\"messages cannot be empty\")\n        for m in messages:\n            if m.role not in [MessageRole.USER, MessageRole.ASSISTANT]:\n                raise ValueError(\"role must be either 'user' or 'assistant'\")\n            if not m.content:\n                raise ValueError(\"message content cannot be empty\")\n            if len(m.content) > 20000:\n                raise ValueError(\"message content cannot exceed 20000 characters\")\n        if messages[-1].role != 
MessageRole.USER:\n            raise ValueError(\"last message must be from user\")\n        return messages\n\n\n@router.post(\"/chats\")\ndef chats(\n    request: Request,\n    session: SessionDep,\n    user: OptionalUserDep,\n    chat_request: ChatRequest,\n):\n    origin = request.headers.get(\"Origin\") or request.headers.get(\"Referer\")\n    browser_id = request.state.browser_id\n\n    try:\n        chat_flow = ChatFlow(\n            db_session=session,\n            user=user,\n            browser_id=browser_id,\n            origin=origin,\n            chat_id=chat_request.chat_id,\n            chat_messages=chat_request.messages,\n            engine_name=chat_request.chat_engine,\n        )\n\n        if chat_request.stream:\n            return StreamingResponse(\n                chat_flow.chat(),\n                media_type=\"text/event-stream\",\n                headers={\n                    \"X-Content-Type-Options\": \"nosniff\",\n                },\n            )\n        else:\n            return get_final_chat_result(chat_flow.chat())\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise InternalServerError()\n\n\n@router.get(\"/chats\")\ndef list_chats(\n    request: Request,\n    session: SessionDep,\n    user: OptionalUserDep,\n    filters: Annotated[ChatFilters, Query()],\n    params: Params = Depends(),\n) -> Page[Chat]:\n    browser_id = request.state.browser_id\n    return chat_repo.paginate(session, user, browser_id, filters, params)\n\n\n@router.get(\"/chats/{chat_id}\")\ndef get_chat(session: SessionDep, user: OptionalUserDep, chat_id: UUID):\n    chat = chat_repo.must_get(session, chat_id)\n\n    if not user_can_view_chat(chat, user):\n        raise HTTPException(status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\")\n\n    return {\n        \"chat\": chat,\n        \"messages\": chat_repo.get_messages(session, chat),\n    }\n\n\n@router.put(\"/chats/{chat_id}\")\ndef 
update_chat(\n    session: SessionDep, user: CurrentUserDep, chat_id: UUID, chat_update: ChatUpdate\n):\n    try:\n        chat = chat_repo.must_get(session, chat_id)\n\n        if not user_can_edit_chat(chat, user):\n            raise HTTPException(\n                status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\"\n            )\n\n        return chat_repo.update(session, chat, chat_update)\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\n@router.delete(\"/chats/{chat_id}\")\ndef delete_chat(session: SessionDep, user: CurrentUserDep, chat_id: UUID):\n    try:\n        chat = chat_repo.must_get(session, chat_id)\n\n        if not user_can_edit_chat(chat, user):\n            raise HTTPException(\n                status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\"\n            )\n\n        return chat_repo.delete(session, chat)\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\n@router.get(\n    \"/chat-messages/{chat_message_id}/subgraph\",\n    response_model=KnowledgeGraphRetrievalResult,\n)\ndef get_chat_subgraph(session: SessionDep, user: OptionalUserDep, chat_message_id: int):\n    try:\n        chat_message = chat_repo.must_get_message(session, chat_message_id)\n\n        if not user_can_view_chat(chat_message.chat, user):\n            raise HTTPException(\n                status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\"\n            )\n\n        result = get_chat_message_subgraph(session, chat_message)\n        return result.model_dump(exclude_none=True)\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\n@router.get(\"/chat-messages/{chat_message_id}/recommended-questions\")\ndef 
get_recommended_questions(\n    session: SessionDep, user: OptionalUserDep, chat_message_id: int\n) -> List[str]:\n    try:\n        chat_message = chat_repo.must_get_message(session, chat_message_id)\n\n        if not user_can_view_chat(chat_message.chat, user):\n            raise HTTPException(\n                status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\"\n            )\n\n        return get_chat_message_recommend_questions(session, chat_message)\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n\n\n@router.post(\"/chat-messages/{chat_message_id}/recommended-questions\")\ndef refresh_recommended_questions(\n    session: SessionDep, user: OptionalUserDep, chat_message_id: int\n) -> List[str]:\n    try:\n        chat_message = chat_repo.must_get_message(session, chat_message_id)\n\n        if not user_can_view_chat(chat_message.chat, user):\n            raise HTTPException(\n                status_code=HTTPStatus.FORBIDDEN, detail=\"Access denied\"\n            )\n\n        remove_chat_message_recommend_questions(session, chat_message_id)\n\n        return get_chat_message_recommend_questions(session, chat_message)\n    except HTTPException as e:\n        raise e\n    except Exception as e:\n        logger.exception(e, exc_info=True)\n        raise InternalServerError()\n"
  },
  {
    "path": "backend/app/api/routes/chat_engine.py",
    "content": "import logging\n\nfrom fastapi import APIRouter, Depends\nfrom app.api.deps import SessionDep\nfrom fastapi_pagination import Params, Page\n\nfrom app.models.chat_engine import ChatEngine\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.repositories.chat_engine import chat_engine_repo\n\nlogger = logging.getLogger(__name__)\n\nrouter = APIRouter()\n\n\n@router.get(\"/chat-engines\")\ndef list_chat_engines(\n    db_session: SessionDep,\n    params: Params = Depends(),\n) -> Page[ChatEngine]:\n    page = chat_engine_repo.paginate(db_session, params, need_public=True)\n    for chat_engine in page.items:\n        engine_config = ChatEngineConfig.model_validate(chat_engine.engine_options)\n        chat_engine.engine_options = engine_config.screenshot()\n    return page\n\n\n@router.get(\"/chat-engines/{chat_engine_id}\")\ndef get_chat_engine(\n    db_session: SessionDep,\n    chat_engine_id: int,\n) -> ChatEngine:\n    chat_engine = chat_engine_repo.must_get(\n        db_session, chat_engine_id, need_public=True\n    )\n    engine_config = ChatEngineConfig.model_validate(chat_engine.engine_options)\n    chat_engine.engine_options = engine_config.screenshot()\n    return chat_engine\n"
  },
  {
    "path": "backend/app/api/routes/document.py",
    "content": "from fastapi import HTTPException, APIRouter\nfrom fastapi.responses import StreamingResponse\nfrom app.api.deps import SessionDep\nfrom app.repositories import document_repo\nfrom app.file_storage import get_file_storage\n\nrouter = APIRouter()\n\n\n@router.get(\"/documents/{doc_id}/download\")\ndef download_file(doc_id: int, session: SessionDep):\n    doc = document_repo.must_get(session, doc_id)\n\n    name = doc.source_uri\n    filestorage = get_file_storage()\n    if filestorage.exists(name):\n        file_size = filestorage.size(name)\n        headers = {\"Content-Length\": str(file_size)}\n\n        def iterfile():\n            with filestorage.open(name) as f:\n                yield from f\n\n        return StreamingResponse(iterfile(), media_type=doc.mime_type, headers=headers)\n    else:\n        raise HTTPException(status_code=404, detail=\"File not found\")\n"
  },
  {
    "path": "backend/app/api/routes/feedback.py",
    "content": "from fastapi import APIRouter, HTTPException, Header\nfrom http import HTTPStatus\nfrom pydantic import BaseModel\n\nfrom app.api.deps import SessionDep, OptionalUserDep\nfrom app.models import FeedbackType, Feedback\nfrom app.repositories import chat_repo\n\nrouter = APIRouter()\n\n\nclass FeedbackRequest(BaseModel):\n    feedback_type: FeedbackType\n    comment: str\n\n\n@router.post(\n    \"/chat-messages/{chat_message_id}/feedback\", status_code=HTTPStatus.CREATED\n)\ndef feedback(\n    session: SessionDep,\n    user: OptionalUserDep,\n    chat_message_id: int,\n    request: FeedbackRequest,\n    origin: str = Header(None),\n    referer: str = Header(None),\n):\n    chat_message = chat_repo.get_message(session, chat_message_id)\n    if not chat_message:\n        raise HTTPException(\n            status_code=HTTPStatus.NOT_FOUND, detail=\"Chat message not found\"\n        )\n    feedback = Feedback(\n        feedback_type=request.feedback_type,\n        comment=request.comment,\n        chat_message_id=chat_message_id,\n        chat_id=chat_message.chat_id,\n        user_id=user.id if user else None,\n        origin=origin or referer,\n    )\n    session.add(feedback)\n    session.commit()\n    return\n"
  },
  {
    "path": "backend/app/api/routes/index.py",
    "content": "from fastapi import APIRouter\nfrom sqlmodel import text\n\nfrom app.api.deps import SessionDep\nfrom app.api.routes.models import SystemConfigStatusResponse\nfrom app.site_settings import SiteSetting\nfrom app.rag.chat.chat_service import (\n    check_rag_required_config,\n    check_rag_optional_config,\n    check_rag_config_need_migration,\n)\n\nrouter = APIRouter()\n\n\n@router.get(\"/healthz\")\ndef status(session: SessionDep):\n    now = session.exec(text(\"SELECT NOW()\")).scalar()\n    return {\"now\": now}\n\n\n@router.get(\"/site-config\")\ndef site_config() -> dict:\n    return SiteSetting.get_client_settings()\n\n\n@router.get(\"/system/bootstrap-status\")\ndef system_bootstrap_status(session: SessionDep) -> SystemConfigStatusResponse:\n    required_config_check_status = check_rag_required_config(session)\n    optional_config_check_status = check_rag_optional_config(session)\n    need_migration_status = check_rag_config_need_migration(session)\n\n    return SystemConfigStatusResponse(\n        required=required_config_check_status,\n        optional=optional_config_check_status,\n        need_migration=need_migration_status,\n    )\n"
  },
  {
    "path": "backend/app/api/routes/models.py",
    "content": "from pydantic import BaseModel\n\n\nclass RequiredConfigStatus(BaseModel):\n    default_llm: bool\n    default_embedding_model: bool\n    default_chat_engine: bool\n    knowledge_base: bool\n\n\nclass OptionalConfigStatus(BaseModel):\n    langfuse: bool\n    default_reranker: bool\n\n\nclass NeedMigrationStatus(BaseModel):\n    chat_engines_without_kb_configured: list[int]\n\n\nclass SystemConfigStatusResponse(BaseModel):\n    required: RequiredConfigStatus\n    optional: OptionalConfigStatus\n    need_migration: NeedMigrationStatus\n"
  },
  {
    "path": "backend/app/api/routes/retrieve/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/api/routes/retrieve/models.py",
    "content": "from typing import Optional\n\nfrom pydantic import BaseModel\n\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrieverConfig,\n)\nfrom app.rag.retrievers.chunk.schema import VectorSearchRetrieverConfig\nfrom app.rag.retrievers.multiple_knowledge_base import FusionRetrievalBaseConfig\n\n# Chunks retrieval\n\n\nclass ChunkRetrievalConfig(FusionRetrievalBaseConfig):\n    full_documents: Optional[bool] = False\n    vector_search: VectorSearchRetrieverConfig\n\n\nclass ChunksRetrievalRequest(BaseModel):\n    query: str\n    retrieval_config: ChunkRetrievalConfig\n\n\n## Knowledge Graph retrieval\n\n\nclass KnowledgeGraphRetrievalConfig(FusionRetrievalBaseConfig):\n    knowledge_graph: KnowledgeGraphRetrieverConfig\n\n\nclass KnowledgeGraphRetrievalRequest(BaseModel):\n    query: str\n    retrieval_config: KnowledgeGraphRetrievalConfig\n"
  },
  {
    "path": "backend/app/api/routes/retrieve/routes.py",
    "content": "import logging\n\nfrom fastapi import APIRouter\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.retrievers.knowledge_graph.fusion_retriever import (\n    KnowledgeGraphFusionRetriever,\n)\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrievalResult,\n)\nfrom app.rag.retrievers.chunk.fusion_retriever import (\n    ChunkFusionRetriever,\n)\nfrom app.exceptions import KBNotFound\nfrom app.rag.retrievers.chunk.schema import ChunksRetrievalResult\nfrom app.rag.llms.resolver import get_llm_or_default\nfrom .models import ChunksRetrievalRequest, KnowledgeGraphRetrievalRequest\n\nrouter = APIRouter()\nlogger = logging.getLogger(__name__)\n\n\n@router.post(\"/retrieve/chunks\")\ndef retrieve_chunks(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    request: ChunksRetrievalRequest,\n) -> ChunksRetrievalResult:\n    try:\n        config = request.retrieval_config\n        llm = get_llm_or_default(db_session, config.llm_id)\n        retriever = ChunkFusionRetriever(\n            db_session=db_session,\n            knowledge_base_ids=config.knowledge_base_ids,\n            llm=llm,\n            use_query_decompose=config.use_query_decompose,\n            config=config.vector_search,\n        )\n        return retriever.retrieve_chunks(request.query, config.full_documents)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise\n\n\n@router.post(\"/retrieve/knowledge_graph\")\ndef retrieve_knowledge_graph(\n    db_session: SessionDep,\n    user: CurrentSuperuserDep,\n    request: KnowledgeGraphRetrievalRequest,\n) -> KnowledgeGraphRetrievalResult:\n    try:\n        config = request.retrieval_config\n        llm = get_llm_or_default(db_session, config.llm_id)\n        retriever = KnowledgeGraphFusionRetriever(\n            db_session=db_session,\n            knowledge_base_ids=config.knowledge_base_ids,\n            llm=llm,\n       
     use_query_decompose=config.use_query_decompose,\n            config=config.knowledge_graph,\n        )\n        return retriever.retrieve_knowledge_graph(request.query)\n    except KBNotFound as e:\n        raise e\n    except Exception as e:\n        logger.exception(e)\n        raise\n"
  },
  {
    "path": "backend/app/api/routes/user.py",
    "content": "from fastapi import APIRouter\n\nfrom app.api.deps import CurrentUserDep\nfrom app.auth.schemas import UserRead\n\nrouter = APIRouter()\n\n\n@router.get(\"/users/me\", response_model=UserRead)\ndef me(user: CurrentUserDep):\n    return user\n"
  },
  {
    "path": "backend/app/api_server.py",
    "content": "import app.logger\nimport sentry_sdk\n\nfrom dotenv import load_dotenv\nfrom contextlib import asynccontextmanager\nfrom fastapi import FastAPI, Request, Response\nfrom fastapi.routing import APIRoute\nfrom starlette.middleware.cors import CORSMiddleware\nfrom app.api.main import api_router\nfrom app.core.config import settings\nfrom app.site_settings import SiteSetting\nfrom app.utils.uuid6 import uuid7\n\n\nload_dotenv()\n\n\ndef custom_generate_unique_id(route: APIRoute) -> str:\n    return f\"{route.tags[0]}-{route.name}\"\n\n\nif settings.SENTRY_DSN and settings.ENVIRONMENT != \"local\":\n    sentry_sdk.init(\n        dsn=str(settings.SENTRY_DSN),\n        enable_tracing=True,\n        traces_sample_rate=settings.SENTRY_TRACES_SAMPLE_RATE,\n        profiles_sample_rate=settings.SENTRY_PROFILES_SAMPLE_RATE,\n    )\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    SiteSetting.update_db_cache()\n    yield\n\n\napp = FastAPI(\n    title=settings.PROJECT_NAME,\n    openapi_url=f\"{settings.API_V1_STR}/openapi.json\",\n    generate_unique_id_function=custom_generate_unique_id,\n    lifespan=lifespan,\n)\n\n\n# Set all CORS enabled origins\nif settings.BACKEND_CORS_ORIGINS:\n    app.add_middleware(\n        CORSMiddleware,\n        allow_origins=[\n            str(origin).strip(\"/\") for origin in settings.BACKEND_CORS_ORIGINS\n        ],\n        allow_credentials=True,\n        allow_methods=[\"*\"],\n        allow_headers=[\"*\"],\n    )\n\n\n@app.middleware(\"http\")\nasync def identify_browser(request: Request, call_next):\n    browser_id = request.cookies.get(settings.BROWSER_ID_COOKIE_NAME)\n    has_browser_id = bool(browser_id)\n    if not browser_id:\n        browser_id = uuid7()\n    request.state.browser_id = browser_id\n    response: Response = await call_next(request)\n    if not has_browser_id:\n        response.set_cookie(\n            settings.BROWSER_ID_COOKIE_NAME,\n            browser_id,\n            
max_age=settings.BROWSER_ID_COOKIE_MAX_AGE,\n        )\n    return response\n\n\napp.include_router(api_router, prefix=settings.API_V1_STR)\n"
  },
  {
    "path": "backend/app/auth/api_keys.py",
    "content": "import base64\nimport string\nimport secrets\nimport hashlib\nfrom typing import Optional, Tuple\n\nfrom fastapi import Request\nfrom sqlmodel import Session, select\nfrom sqlmodel.ext.asyncio.session import AsyncSession\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\n\nfrom app.models import ApiKey, User\n\n\ndef generate_api_key(length=50):\n    characters = string.ascii_letters + string.digits\n    api_key = \"\".join(secrets.choice(characters) for _ in range(length))\n    return \"ta-\" + api_key\n\n\nAPI_KEY_HEADER = \"Authorization\"\nBEARER_PREFIX = \"Bearer \"\n\n\ndef get_api_key_from_request(request: Request) -> str | None:\n    api_key_header = request.headers.get(API_KEY_HEADER)\n    if api_key_header is None:\n        return None\n\n    if not api_key_header.startswith(BEARER_PREFIX):\n        return None\n\n    return api_key_header[len(BEARER_PREFIX) :].strip()\n\n\ndef encrypt_api_key(api_key: str) -> str:\n    # An empty salt is used because we need to look tokens up solely by\n    # their hashed value. 
Additionally, tokens are always cryptographically\n    # pseudo-random and unique, therefore salting provides no\n    # additional security.\n    algorithm = \"pbkdf2_sha512\"\n    api_key = api_key.encode(\"utf-8\")\n    salt = b\"\"\n    iterations = 20_000\n    hash = hashlib.pbkdf2_hmac(\"sha512\", api_key, salt, iterations)\n    hash = base64.b64encode(hash).decode(\"ascii\").strip()\n    return \"%s$%d$%s\" % (algorithm, iterations, hash)\n\n\nclass ApiKeyManager:\n    async def acreate_api_key(\n        self, session: AsyncSession, user: User, description: str\n    ) -> Tuple[ApiKey, str]:\n        api_key = generate_api_key()\n        hashed_api_key = encrypt_api_key(api_key)\n        api_key_obj = ApiKey(\n            hashed_secret=hashed_api_key,\n            api_key_display=api_key[:7] + \"....\" + api_key[-3:],\n            user_id=user.id,\n            description=description,\n        )\n        session.add(api_key_obj)\n        await session.commit()\n        await session.refresh(api_key_obj)\n        return api_key_obj, api_key\n\n    def create_api_key(\n        self, session: Session, user: User, description: str\n    ) -> Tuple[ApiKey, str]:\n        api_key = generate_api_key()\n        hashed_api_key = encrypt_api_key(api_key)\n        api_key_obj = ApiKey(\n            hashed_secret=hashed_api_key,\n            api_key_display=api_key[:7] + \"....\" + api_key[-3:],\n            user_id=user.id,\n            description=description,\n        )\n        session.add(api_key_obj)\n        session.commit()\n        session.refresh(api_key_obj)\n        return api_key_obj, api_key\n\n    async def get_active_user_by_raw_api_key(\n        self, session: AsyncSession, api_key: str\n    ) -> Optional[User]:\n        if not api_key:\n            return None\n        hashed_api_key = encrypt_api_key(api_key)\n        results = await session.exec(\n            select(ApiKey).where(\n                ApiKey.is_active == True,\n                
ApiKey.hashed_secret == hashed_api_key,\n            )\n        )\n        api_key_obj = results.first()\n        if not api_key_obj:\n            return None\n\n        user = await session.get(User, api_key_obj.user_id)\n        if not (user.is_active and user.is_verified):\n            return None\n        return user\n\n    async def get_active_user_from_request(\n        self, session: AsyncSession, request: Request\n    ) -> Optional[User]:\n        api_key = get_api_key_from_request(request)\n        return await self.get_active_user_by_raw_api_key(session, api_key)\n\n    async def list_api_keys(\n        self, session: AsyncSession, user: User, params: Params\n    ) -> Page[ApiKey]:\n        api_keys = await paginate(\n            session,\n            select(ApiKey)\n            .where(ApiKey.user == user, ApiKey.is_active == True)\n            .order_by(ApiKey.created_at.desc()),\n            params,\n        )\n        return api_keys\n\n    async def delete_api_key(self, session: AsyncSession, user: User, api_key_id: int):\n        result = await session.exec(\n            select(ApiKey).where(\n                ApiKey.id == api_key_id,\n                ApiKey.user_id == user.id,\n                ApiKey.is_active == True,\n            )\n        )\n        api_key = result.first()\n        if api_key:\n            api_key.is_active = False\n            await session.commit()\n\n\napi_key_manager = ApiKeyManager()\n"
  },
  {
    "path": "backend/app/auth/db.py",
    "content": "from fastapi import Depends\nfrom fastapi_users_db_sqlmodel import SQLModelUserDatabaseAsync\nfrom fastapi_users_db_sqlmodel.access_token import SQLModelAccessTokenDatabaseAsync\nfrom sqlmodel.ext.asyncio.session import AsyncSession\n\nfrom app.models import User, UserSession\nfrom app.core.db import get_db_async_session\n\n\nasync def get_user_db(session: AsyncSession = Depends(get_db_async_session)):\n    yield SQLModelUserDatabaseAsync(session, User)\n\n\nasync def get_user_session_db(\n    session: AsyncSession = Depends(get_db_async_session),\n):\n    yield SQLModelAccessTokenDatabaseAsync(session, UserSession)\n"
  },
  {
    "path": "backend/app/auth/schemas.py",
    "content": "import uuid\n\nfrom fastapi_users import schemas\n\n\nclass UserRead(schemas.BaseUser[uuid.UUID]):\n    pass\n\n\nclass UserCreate(schemas.BaseUserCreate):\n    pass\n\n\nclass UserUpdate(schemas.BaseUserUpdate):\n    pass\n"
  },
  {
    "path": "backend/app/auth/users.py",
    "content": "import logging\nimport uuid\nimport contextlib\nfrom http import HTTPStatus\nfrom typing import Optional\n\nfrom fastapi import Depends, Request, HTTPException\nfrom fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin\nfrom fastapi_users.authentication import (\n    AuthenticationBackend,\n    CookieTransport,\n)\nfrom fastapi_users.authentication.strategy import DatabaseStrategy\nfrom fastapi_users_db_sqlmodel import SQLModelUserDatabaseAsync\nfrom fastapi_users_db_sqlmodel.access_token import SQLModelAccessTokenDatabaseAsync\nfrom fastapi_users.exceptions import UserAlreadyExists, UserNotExists\nfrom sqlmodel.ext.asyncio.session import AsyncSession\n\nfrom app.core.config import settings\nfrom app.core.db import get_db_async_session\nfrom app.models import User, UserSession\nfrom app.auth.db import get_user_db, get_user_session_db\nfrom app.auth.api_keys import api_key_manager\nfrom app.auth.schemas import UserCreate, UserUpdate\n\nlogger = logging.getLogger(__name__)\n\n\nclass UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):\n    reset_password_token_secret = settings.SECRET_KEY\n    verification_token_secret = settings.SECRET_KEY\n\n    async def on_after_register(self, user: User, request: Optional[Request] = None):\n        print(f\"User {user.id} has registered.\")\n\n    async def on_after_forgot_password(\n        self, user: User, token: str, request: Optional[Request] = None\n    ):\n        print(f\"User {user.id} has forgot their password. Reset token: {token}\")\n\n    async def on_after_request_verify(\n        self, user: User, token: str, request: Optional[Request] = None\n    ):\n        print(f\"Verification requested for user {user.id}. 
Verification token: {token}\")\n\n\nasync def get_user_manager(user_db: SQLModelUserDatabaseAsync = Depends(get_user_db)):\n    yield UserManager(user_db)\n\n\ncookie_transport = CookieTransport(\n    cookie_name=settings.SESSION_COOKIE_NAME,\n    cookie_max_age=settings.SESSION_COOKIE_MAX_AGE,\n    cookie_secure=settings.SESSION_COOKIE_SECURE,\n)\n\n\ndef get_database_strategy(\n    user_session_db: SQLModelAccessTokenDatabaseAsync[UserSession] = Depends(\n        get_user_session_db\n    ),\n) -> DatabaseStrategy:\n    return DatabaseStrategy(user_session_db, lifetime_seconds=3600 * 24 * 90)\n\n\nauth_backend = AuthenticationBackend(\n    name=\"database\",\n    transport=cookie_transport,\n    get_strategy=get_database_strategy,\n)\n\nfastapi_users = FastAPIUsers[User, uuid.UUID](get_user_manager, [auth_backend])\n\n\n# Following methods are used to get the current user from the request,\n# They all support both session cookies and API keys for authentication,\n# it will first check for a session cookie, if not found, then check for an API key.\nasync def current_user(\n    request: Request,\n    user: User = Depends(\n        fastapi_users.current_user(optional=True, active=True, verified=True)\n    ),\n    session: AsyncSession = Depends(get_db_async_session),\n) -> User:\n    if user:\n        # already authenticated with a valid session cookie\n        return user\n\n    # check for an API key\n    user = await api_key_manager.get_active_user_from_request(session, request)\n    if not user:\n        raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED)\n    return user\n\n\nasync def current_superuser(\n    request: Request,\n    user: User = Depends(\n        fastapi_users.current_user(optional=True, active=True, verified=True)\n    ),\n    session: AsyncSession = Depends(get_db_async_session),\n) -> User:\n    if user:\n        if user.is_superuser:\n            return user\n        raise HTTPException(status_code=HTTPStatus.FORBIDDEN)\n\n    # check 
for an API key\n    user = await api_key_manager.get_active_user_from_request(session, request)\n    if not user:\n        raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED)\n    if not user.is_superuser:\n        raise HTTPException(status_code=HTTPStatus.FORBIDDEN)\n    return user\n\n\nasync def optional_current_user(\n    request: Request,\n    user: User = Depends(\n        fastapi_users.current_user(optional=True, active=True, verified=True)\n    ),\n    session: AsyncSession = Depends(get_db_async_session),\n) -> Optional[User]:\n    if user:\n        # already authenticated with a valid session cookie\n        return user\n\n    # check for an API key\n    return await api_key_manager.get_active_user_from_request(session, request)\n\n\nget_user_db_context = contextlib.asynccontextmanager(get_user_db)\nget_user_manager_context = contextlib.asynccontextmanager(get_user_manager)\n\n\nasync def create_user(\n    session: AsyncSession,\n    email: str,\n    password: str,\n    is_active: bool = True,\n    is_verified: bool = True,\n    is_superuser: bool = False,\n) -> User:\n    try:\n        async with get_user_db_context(session) as user_db:\n            async with get_user_manager_context(user_db) as user_manager:\n                user = await user_manager.create(\n                    UserCreate(\n                        email=email,\n                        password=password,\n                        is_active=is_active,\n                        is_verified=is_verified,\n                        is_superuser=is_superuser,\n                    )\n                )\n                return user\n    except UserAlreadyExists:\n        logger.error(f\"User {email} already exists\")\n        raise\n\n\nasync def update_user_password(\n    session: AsyncSession,\n    user_id: uuid.UUID,\n    new_password: str,\n) -> User:\n    try:\n        async with get_user_db_context(session) as user_db:\n            async with get_user_manager_context(user_db) as 
user_manager:\n                user = await user_manager.get(user_id)\n                if not user:\n                    raise UserNotExists(f\"User {user_id} does not exist\")\n\n                user_update = UserUpdate(password=new_password)\n                await user_manager.update(user_update, user)\n                # verify\n                updated_user = await user_manager.get(user_id)\n                return updated_user\n\n    except UserNotExists as e:\n        logger.error(str(e))\n        raise\n    except Exception as e:\n        logger.error(f\"Failed to update password for user {user_id}: {e}\")\n        raise\n"
  },
  {
    "path": "backend/app/celery.py",
    "content": "from celery import Celery\n\nfrom app.core.config import settings\n\n\napp = Celery(\n    settings.PROJECT_NAME,\n    broker=settings.CELERY_BROKER_URL,\n    backend=settings.CELERY_RESULT_BACKEND,\n)\n\napp.conf.update(\n    task_acks_late=True,\n    task_reject_on_worker_lost=True,\n    task_routes=[\n        {\"app.tasks.evaluate.*\": {\"queue\": \"evaluation\"}},\n        {\"*\": {\"queue\": \"default\"}},\n    ],\n    broker_connection_retry_on_startup=True,\n)\n\napp.autodiscover_tasks([\"app\"])\n"
  },
  {
    "path": "backend/app/core/config.py",
    "content": "import enum\nfrom typing import Annotated, Any\nfrom urllib.parse import quote\n\nfrom pydantic import (\n    AnyUrl,\n    BeforeValidator,\n    HttpUrl,\n    MySQLDsn,\n    SecretStr,\n    computed_field,\n    model_validator,\n)\nfrom pydantic_core import MultiHostUrl\nfrom pydantic_settings import BaseSettings, SettingsConfigDict\nfrom typing_extensions import Self\n\n\ndef parse_cors(v: Any) -> list[str] | str:\n    if isinstance(v, str) and not v.startswith(\"[\"):\n        return [i.strip() for i in v.split(\",\")]\n    elif isinstance(v, list | str):\n        return v\n    raise ValueError(v)\n\n\nclass Environment(str, enum.Enum):\n    LOCAL = \"local\"\n    STAGING = \"staging\"\n    PRODUCTION = \"production\"\n\n\nclass Settings(BaseSettings):\n    model_config = SettingsConfigDict(\n        env_file=\".env\", env_ignore_empty=True, extra=\"ignore\"\n    )\n    API_V1_STR: str = \"/api/v1\"\n    SECRET_KEY: str\n    DOMAIN: str = \"localhost\"\n    ENVIRONMENT: Environment = Environment.LOCAL\n    LOG_LEVEL: str = \"INFO\"\n    SQLALCHEMY_LOG_LEVEL: str = \"WARNING\"\n\n    SESSION_COOKIE_NAME: str = \"session\"\n    # 90 days\n    SESSION_COOKIE_MAX_AGE: int = 3600 * 24 * 90\n    SESSION_COOKIE_SECURE: bool = False\n\n    BROWSER_ID_COOKIE_NAME: str = \"bid\"\n    BROWSER_ID_COOKIE_MAX_AGE: int = 3600 * 24 * 365 * 2\n\n    @computed_field  # type: ignore[misc]\n    @property\n    def server_host(self) -> str:\n        # Use HTTPS for anything other than local development\n        if self.ENVIRONMENT == Environment.LOCAL:\n            return f\"http://{self.DOMAIN}\"\n        return f\"https://{self.DOMAIN}\"\n\n    BACKEND_CORS_ORIGINS: Annotated[\n        list[AnyUrl] | str, BeforeValidator(parse_cors)\n    ] = []\n    BACKEND_CORS_ORIGIN_REGEXP: str | None = None\n\n    PROJECT_NAME: str = \"TiDB.AI\"\n    SENTRY_DSN: HttpUrl | None = None\n    SENTRY_TRACES_SAMPLE_RATE: float = 1.0\n    SENTRY_PROFILES_SAMPLE_RATE: float = 1.0\n\n    
@model_validator(mode=\"after\")\n    def _validate_sentry_sample_rate(self) -> Self:\n        if not self.SENTRY_DSN:\n            return self\n        if self.SENTRY_TRACES_SAMPLE_RATE < 0 or self.SENTRY_TRACES_SAMPLE_RATE > 1:\n            raise ValueError(\"SENTRY_TRACES_SAMPLE_RATE must be between 0 and 1\")\n        if self.SENTRY_PROFILES_SAMPLE_RATE < 0 or self.SENTRY_PROFILES_SAMPLE_RATE > 1:\n            raise ValueError(\"SENTRY_PROFILES_SAMPLE_RATE must be between 0 and 1\")\n        return self\n\n    LOCAL_FILE_STORAGE_PATH: str = \"/shared/data\"\n\n    TIDB_HOST: str = \"127.0.0.1\"\n    TIDB_PORT: int = 4000\n    TIDB_USER: str = \"root\"\n    TIDB_PASSWORD: str = \"\"\n    TIDB_DATABASE: str\n    TIDB_SSL: bool = True\n\n    ENABLE_QUESTION_CACHE: bool = False\n\n    CELERY_BROKER_URL: str = \"redis://redis:6379/0\"\n    CELERY_RESULT_BACKEND: str = \"redis://redis:6379/0\"\n\n    # TODO: move below config to `option` table, it should be configurable by staff in console\n    TIDB_AI_CHAT_ENDPOINT: str = \"https://tidb.ai/api/v1/chats\"\n    TIDB_AI_API_KEY: SecretStr | None = None\n\n    COMPLIED_INTENT_ANALYSIS_PROGRAM_PATH: str | None = None\n    COMPLIED_PREREQUISITE_ANALYSIS_PROGRAM_PATH: str | None = None\n\n    # NOTICE: EMBEDDING_DIMS and EMBEDDING_MAX_TOKENS is deprecated and\n    # will be removed in the future.\n    EMBEDDING_DIMS: int = 1536\n    EMBEDDING_MAX_TOKENS: int = 2048\n\n    EVALUATION_OPENAI_API_KEY: str | None = None\n\n    @computed_field  # type: ignore[misc]\n    @property\n    def SQLALCHEMY_DATABASE_URI(self) -> MySQLDsn:\n        return MultiHostUrl.build(\n            scheme=\"mysql+pymysql\",\n            username=self.TIDB_USER,\n            # TODO: remove quote after following issue is fixed:\n            # https://github.com/pydantic/pydantic/issues/8061\n            password=quote(self.TIDB_PASSWORD),\n            host=self.TIDB_HOST,\n            port=self.TIDB_PORT,\n            path=self.TIDB_DATABASE,\n      
      query=\"ssl_verify_cert=true&ssl_verify_identity=true\"\n            if self.TIDB_SSL\n            else None,\n        )\n\n    @computed_field  # type: ignore[misc]\n    @property\n    def SQLALCHEMY_ASYNC_DATABASE_URI(self) -> MySQLDsn:\n        return MultiHostUrl.build(\n            scheme=\"mysql+asyncmy\",\n            username=self.TIDB_USER,\n            password=quote(self.TIDB_PASSWORD),\n            host=self.TIDB_HOST,\n            port=self.TIDB_PORT,\n            path=self.TIDB_DATABASE,\n        )\n\n    @model_validator(mode=\"after\")\n    def _validate_secrets(self) -> Self:\n        secret = self.SECRET_KEY\n        if not secret:\n            raise ValueError(\n                \"Please set a secret key using the SECRET_KEY environment variable.\"\n            )\n\n        min_length = 32\n        if len(secret.encode()) < min_length:\n            message = (\n                \"The SECRET_KEY is too short, \"\n                f\"please use a longer secret, at least {min_length} characters.\"\n            )\n            raise ValueError(message)\n        return self\n\n\nsettings = Settings()  # type: ignore\n"
  },
  {
    "path": "backend/app/core/db.py",
    "content": "import ssl\nimport contextlib\nfrom typing import AsyncGenerator, Generator\n\nfrom sqlmodel import create_engine, Session\nfrom sqlalchemy import event\nfrom sqlalchemy.orm import scoped_session, sessionmaker\nfrom sqlalchemy.ext.asyncio import create_async_engine\nfrom sqlmodel.ext.asyncio.session import AsyncSession\n\nfrom app.core.config import settings\n\n\n# TiDB Serverless clusters have a limitation: if there are no active connections for 5 minutes,\n# they will shut down, which closes all connections, so we need to recycle the connections\nengine = create_engine(\n    str(settings.SQLALCHEMY_DATABASE_URI),\n    pool_size=20,\n    max_overflow=40,\n    pool_recycle=300,\n    pool_pre_ping=True,\n)\n\n# create a scoped session, ensure in multi-threading environment, each thread has its own session\nScoped_Session = scoped_session(sessionmaker(bind=engine, class_=Session))\n\n\ndef get_ssl_context():\n    ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)\n    ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2\n    ssl_context.check_hostname = True\n    return ssl_context\n\n\nasync_engine = create_async_engine(\n    str(settings.SQLALCHEMY_ASYNC_DATABASE_URI),\n    pool_recycle=300,\n    connect_args={\n        # seems config ssl in url is not working\n        # we can only config ssl in connect_args\n        \"ssl\": get_ssl_context(),\n    }\n    if settings.TIDB_SSL\n    else {},\n)\n\n\ndef prepare_db_connection(dbapi_connection, connection_record):\n    cursor = dbapi_connection.cursor()\n    # In TiDB.AI, we store datetime in the database using UTC timezone.\n    # Therefore, we need to set the timezone to '+00:00'.\n    cursor.execute(\"SET time_zone = '+00:00'\")\n    cursor.close()\n\n\nevent.listen(engine, \"connect\", prepare_db_connection)\nevent.listen(async_engine.sync_engine, \"connect\", prepare_db_connection)\n\n\ndef get_db_session() -> Generator[Session, None, None]:\n    with Session(engine, 
expire_on_commit=False) as session:\n        yield session\n\n\nasync def get_db_async_session() -> AsyncGenerator[AsyncSession, None]:\n    async with AsyncSession(async_engine, expire_on_commit=False) as session:\n        yield session\n\n\nget_db_async_session_context = contextlib.asynccontextmanager(get_db_async_session)\n"
  },
  {
    "path": "backend/app/evaluation/evals.py",
    "content": "import logging\nimport os\n\nimport requests\nimport typing\nimport uuid\nimport json\nfrom tqdm import tqdm\nfrom datetime import datetime\nfrom langfuse import Langfuse\nfrom langfuse.client import DatasetItemClient\nfrom langfuse.model import DatasetStatus\nfrom tenacity import retry, stop_after_attempt, wait_fixed\nfrom llama_index.llms.gemini import Gemini\nfrom llama_index.llms.openai import OpenAI\n\nfrom app.core.config import settings\nfrom app.evaluation.evaluators import (\n    LanguageEvaluator,\n    ToxicityEvaluator,\n    E2ERagEvaluator,\n)\nimport pandas as pd\nfrom ragas.metrics import (\n    FactualCorrectness,\n    SemanticSimilarity,\n)\nfrom ragas import evaluate, EvaluationDataset\nfrom ragas.llms import LangchainLLMWrapper\nfrom ragas.embeddings import LangchainEmbeddingsWrapper\nfrom langchain_openai import ChatOpenAI\nfrom langchain_openai import OpenAIEmbeddings\n\nlogger = logging.getLogger(__name__)\n\nDEFAULT_METRICS = [\"toxicity\", \"language\"]\nDEFAULT_TIDB_AI_CHAT_ENGINE = \"default\"\n\n\nclass Evaluation:\n    \"\"\"\n    Evaluate a dataset using TiDB AI and Langfuse.\n\n    Args:\n        dataset_name: \"customize\" or the name of the dataset in langfuse to evaluate\n        run_name: The name of the run to create. If not provided, a random name will be generated.\n        llm_provider: The LLM provider to use. 
Can be \"openai\" or \"gemini\".\n\n    Examples:\n\n    ```python\n    evaluation = Evaluation(dataset_name=\"my_dataset\")\n    evaluation.run()\n    ```\n    \"\"\"\n\n    def __init__(\n        self,\n        dataset_name: str,\n        run_name: typing.Optional[str] = None,\n        llm_provider: typing.Literal[\"openai\", \"gemini\"] = \"openai\",\n        tidb_ai_chat_engine: typing.Optional[str] = DEFAULT_TIDB_AI_CHAT_ENGINE,\n    ) -> None:\n        self.langfuse = Langfuse()\n        self.dataset_name = dataset_name\n        self.is_customize_dataset = dataset_name == \"customize\"\n        if not self.is_customize_dataset:\n            self.dataset = self.langfuse.get_dataset(dataset_name)\n\n        self.tidb_ai_chat_engine = tidb_ai_chat_engine\n\n        if run_name is None:\n            random_str = uuid.uuid4().hex[:6]\n            self.run_name = datetime.now().strftime(f\"%Y-%m-%d-%H-{random_str}\")\n        else:\n            self.run_name = run_name\n\n        llm_provider = llm_provider.lower()\n        if llm_provider == \"openai\":\n            self._llama_llm = OpenAI(model=\"gpt-4o\")\n        elif llm_provider == \"gemini\":\n            self._llama_llm = Gemini(model=\"models/gemini-2.0-flash\")\n        else:\n            raise ValueError(f\"Invalid LLM provider: {llm_provider}\")\n\n        self._metrics = {\n            \"language\": LanguageEvaluator(llm=self._llama_llm),\n            \"toxicity\": ToxicityEvaluator(llm=self._llama_llm),\n            \"e2e_rag\": E2ERagEvaluator(model=\"gpt-4o\"),\n        }\n\n    def runeval_dataset(\n        self,\n        csv_dataset: str,\n        run_size: int = 30,\n        checkpoint_file: str = \"checkpoint.json\",\n        error_file: str = \"eval_error.csv\",\n    ) -> None:\n        if not os.path.exists(csv_dataset):\n            raise FileNotFoundError(f\"File not found: {csv_dataset}\")\n\n        df = pd.read_csv(csv_dataset)\n        eval_list = df.to_dict(orient=\"records\")\n        
eval_list = eval_list[:run_size]\n\n        # checkpoint info\n        ragas_list = []\n        completed_queries = set()\n        if os.path.exists(checkpoint_file):\n            with open(checkpoint_file, \"r\") as f:\n                checkpoint_data = json.load(f)\n                completed_queries = set(checkpoint_data[\"completed_queries\"])\n                ragas_list = checkpoint_data[\"ragas_list\"]\n\n        # error info\n        error_list = []\n        errored_queries = set()\n        if os.path.exists(error_file):\n            error_df = pd.read_csv(error_file)\n            error_list = error_df.to_dict(orient=\"records\")\n            errored_queries = set(item[\"query\"] for item in error_list)\n\n        for item in tqdm(eval_list):\n            if item[\"query\"] in completed_queries or item[\"query\"] in errored_queries:\n                continue  # skip completed or errored queries\n\n            messages = [{\"role\": \"user\", \"content\": item[\"query\"]}]\n            try:\n                response, _ = self._generate_answer_by_tidb_ai(messages)\n                user_input = json.dumps(messages)\n\n                ragas_list.append(\n                    {\n                        \"user_input\": user_input,\n                        \"reference\": item[\"reference\"],\n                        \"response\": response,\n                        # TODO: we cannot get retrieved_contexts now, due to the external engine\n                        # \"retrieved_contexts\": [],\n                        # Add rest fields from raw data\n                        **{\n                            k: v\n                            for k, v in item.items()\n                            if k not in [\"query\", \"reference\"]\n                        },\n                    }\n                )\n\n                # save the checkpoint file\n                completed_queries.add(item[\"query\"])\n                checkpoint_data = {\n                    
\"completed_queries\": list(completed_queries),\n                    \"ragas_list\": ragas_list,\n                }\n                with open(checkpoint_file, \"w\") as f:\n                    json.dump(checkpoint_data, f)\n            except Exception as e:\n                print(f\"Error processing query: {item['query']}, error: {e}\")\n                item[\"error_message\"] = str(e)\n                error_list.append(item)  # Add the item to the error list\n\n                # Save the errors to the error file\n                pd.DataFrame(error_list).to_csv(error_file, index=False)\n\n        ragas_dataset = EvaluationDataset.from_list(ragas_list)\n        evaluator_llm = LangchainLLMWrapper(ChatOpenAI(model=\"gpt-4o\"))\n        evaluator_embeddings = LangchainEmbeddingsWrapper(\n            OpenAIEmbeddings(model=\"text-embedding-3-large\")\n        )\n        metrics = [\n            # LLMContextRecall(llm=evaluator_llm),  # retrieved_contexts required\n            FactualCorrectness(llm=evaluator_llm),\n            # Faithfulness(llm=evaluator_llm),  # retrieved_contexts required\n            SemanticSimilarity(embeddings=evaluator_embeddings),\n        ]\n        results = evaluate(dataset=ragas_dataset, metrics=metrics)\n        df_results = results.to_pandas()\n\n        df_raw_data = pd.DataFrame(ragas_list)\n        additional_columns = df_raw_data.drop(\n            columns=[\"user_input\", \"reference\", \"response\"]\n        )\n        df_results_combined = pd.concat([df_results, additional_columns], axis=1)\n\n        df_results_combined = df_results_combined.applymap(\n            lambda x: x.replace(\"\\n\", \"\\\\n\").replace(\"\\r\", \"\\\\r\")\n            if isinstance(x, str)\n            else x\n        )\n        df_results_combined.to_csv(f\"results_{self.run_name}.csv\", index=False)\n\n        print(f\"Saved results to results_{self.run_name}.csv\")\n\n    def run(self, metrics: list = DEFAULT_METRICS) -> None:\n        for item in 
tqdm(self.dataset.items):\n            if item.status != DatasetStatus.ACTIVE:\n                continue\n\n            sample_data = self.parse_sample(item)\n            output, trace_id = self._generate_answer_by_tidb_ai(sample_data[\"messages\"])\n            trace_data = fetch_rag_data(self.langfuse, trace_id)\n            question = json.dumps(sample_data[\"messages\"])\n            item.link(\n                trace_or_observation=None,\n                trace_id=trace_id,\n                run_name=self.run_name,\n            )\n\n            for metric in metrics:\n                evaluator = self._metrics[metric]\n                result = evaluator.evaluate(\n                    query=question,\n                    response=output,\n                    contexts=trace_data.get(\"retrieval_context\", []),\n                    reference=sample_data.get(\"expected_output\", None),\n                )\n                if isinstance(result, dict):\n                    for eval_name, eval_res in result.items():\n                        self.langfuse.score(\n                            trace_id=trace_id,\n                            name=eval_name,\n                            value=eval_res.score,\n                            comment=eval_res.feedback,\n                        )\n                else:\n                    self.langfuse.score(\n                        trace_id=trace_id,\n                        name=metric,\n                        value=result.score,\n                        comment=result.feedback,\n                    )\n\n    def parse_sample(self, item: DatasetItemClient):\n        expected_output = item.expected_output\n        messages = []\n        if \"history\" in item.input:\n            messages = [\n                {\n                    \"role\": message[\"role\"],\n                    \"content\": message[\"content\"],\n                }\n                for message in item.input[\"history\"]\n            ]\n\n        if \"userInput\" 
in item.input:\n            messages.append({\"role\": \"user\", \"content\": item.input[\"userInput\"]})\n        elif \"input\" in item.input:\n            messages.append({\"role\": \"user\", \"content\": item.input[\"input\"]})\n\n        sample_data = {\n            \"messages\": messages,\n            \"expected_output\": expected_output,\n        }\n\n        if \"retrieval_context\" in item.input:\n            sample_data[\"retrieval_context\"] = item.input[\"retrieval_context\"]\n        if \"graph_context\" in item.input:\n            sample_data[\"graph_context\"] = item.input[\"graph_context\"]\n        if \"refined_question\" in item.input:\n            sample_data[\"refined_question\"] = item.input[\"refined_question\"]\n\n        return sample_data\n\n    @retry(stop=stop_after_attempt(2), wait=wait_fixed(5))\n    def _generate_answer_by_tidb_ai(self, messages: list) -> (str, str):\n        response = requests.post(\n            settings.TIDB_AI_CHAT_ENDPOINT,\n            headers={\n                \"Content-Type\": \"application/json\",\n                \"Authorization\": f\"Bearer {settings.TIDB_AI_API_KEY}\",\n                \"Origin\": \"evaluation\",\n            },\n            json={\n                \"messages\": messages,\n                \"index\": \"default\",\n                \"chat_engine\": self.tidb_ai_chat_engine,\n                \"stream\": False,\n            },\n        )\n        response.raise_for_status()\n        data = response.json()\n        if data[\"trace\"] is None:\n            trace_id = None\n        else:\n            trace_url = data[\"trace\"][\"langfuse_url\"]\n            trace_id = parse_langfuse_trace_id_from_url(trace_url)\n\n        answer = data[\"content\"]\n        return answer, trace_id\n\n    def generate_answer_by_tidb_ai(self, messages: list) -> str:\n        response = requests.post(\n            settings.TIDB_AI_CHAT_ENDPOINT,\n            headers={\n                \"Content-Type\": 
\"application/json\",\n                \"Authorization\": f\"Bearer {settings.TIDB_AI_API_KEY}\",\n            },\n            json={\n                \"messages\": messages,\n                \"index\": \"default\",\n                \"chat_engine\": self.tidb_ai_chat_engine,\n                \"stream\": False,\n            },\n        )\n        response.raise_for_status()\n        data = response.text\n\n        return data\n\n\ndef parse_langfuse_trace_id_from_url(trace_url: str) -> str:\n    # Example trace_url: https://us.cloud.langfuse.com/trace/87e7eb2e-b789-4b23-af60-fbcf0fd517a1\n    return trace_url.split(\"/\")[-1]\n\n\ndef fetch_rag_data(langfuse_client: Langfuse, tracing_id: str):\n    graph_context_key = \"retrieve_from_graph\"\n    reranking_key = \"reranking\"\n    refined_question_key = \"condense_question\"\n\n    tracing_data = langfuse_client.fetch_trace(tracing_id)\n\n    data = {\n        \"history\": tracing_data.data.input[\"chat_history\"],\n        \"input\": tracing_data.data.input[\"user_question\"],\n        \"graph_context\": None,\n        \"refined_question\": None,\n        \"retrieval_context\": None,\n        \"output\": (\n            tracing_data.data.output[\"content\"]\n            if tracing_data.data.output is not None\n            and \"content\" in tracing_data.data.output\n            else None\n        ),\n        \"source_tracing_id\": tracing_id,\n    }\n\n    for ob in tracing_data.data.observations:\n        if graph_context_key == ob.name:\n            graph_context = {query: sg for query, sg in ob.output[\"graph\"].items()}\n            for _, sg in graph_context.items():\n                for entity in sg[\"entities\"]:\n                    entity.pop(\"meta\", None)\n            data[\"graph_context\"] = graph_context\n        if reranking_key == ob.name:\n            retrieval_context = []\n            for node in ob.output[\"nodes\"]:\n                retrieval_context.append(node[\"node\"][\"text\"])\n           
 data[\"retrieval_context\"] = retrieval_context\n        if refined_question_key == ob.name:\n            refined_question = ob.output\n            data[\"refined_question\"] = refined_question\n\n    return data\n"
  },
  {
    "path": "backend/app/evaluation/evaluators/__init__.py",
    "content": "from .language_detector import LanguageEvaluator\nfrom .toxicity import ToxicityEvaluator\nfrom .e2e_rag_evaluator import E2ERagEvaluator\n\n__all__ = [\"LanguageEvaluator\", \"ToxicityEvaluator\", \"E2ERagEvaluator\"]\n"
  },
  {
    "path": "backend/app/evaluation/evaluators/e2e_rag_evaluator.py",
    "content": "import time\nfrom typing import Optional, Sequence, Mapping\nfrom llama_index.core.evaluation.base import EvaluationResult\nfrom deepeval import evaluate\nfrom deepeval.test_case import LLMTestCase\nfrom deepeval.metrics import (\n    ContextualPrecisionMetric,\n    ContextualRecallMetric,\n    ContextualRelevancyMetric,\n    AnswerRelevancyMetric,\n    FaithfulnessMetric,\n)\n\nmax_retries = 3\nretry_delay = 2\n\n\nclass E2ERagEvaluator:\n    def __init__(self, model=\"gpt-4o\", threshold=0.7) -> None:\n        self._model = model\n        self._threshold = threshold\n\n        self._contextual_precision = ContextualPrecisionMetric(\n            threshold=self._threshold, model=self._model, include_reason=True\n        )\n        self._contextual_recall = ContextualRecallMetric(\n            threshold=self._threshold, model=self._model, include_reason=True\n        )\n        self._contextual_relevancy = ContextualRelevancyMetric(\n            threshold=self._threshold, model=self._model, include_reason=True\n        )\n        self._answer_relevancy = AnswerRelevancyMetric(\n            threshold=self._threshold, model=self._model, include_reason=True\n        )\n        self._faithfulness = FaithfulnessMetric(\n            threshold=self._threshold, model=self._model, include_reason=True\n        )\n\n    def evaluate(\n        self,\n        query: Optional[str] = None,\n        response: Optional[str] = None,\n        contexts: Optional[Sequence[str]] = None,\n        reference: Optional[str] = None,\n    ) -> Mapping[str, EvaluationResult]:\n        test_case = LLMTestCase(\n            input=query,\n            actual_output=response,\n            expected_output=reference,\n            retrieval_context=contexts,\n        )\n\n        evalution_results = []\n        for attempt in range(max_retries):\n            try:\n                evalution_results = evaluate(\n                    test_cases=[test_case],\n                    metrics=[\n  
                      self._contextual_precision,\n                        self._contextual_recall,\n                        self._contextual_relevancy,\n                        self._answer_relevancy,\n                        self._faithfulness,\n                    ],\n                    print_results=False,\n                    show_indicator=False,\n                )\n            except ValueError as e:\n                print(f\"Caught ValueError: {e}\")\n                print(f\"Retrying {attempt + 1}/{max_retries}...\")\n                time.sleep(retry_delay)\n\n        if len(evalution_results) == 0:\n            return {}\n\n        metrics_results = {}\n        for eval_result in evalution_results:\n            for score in eval_result.metrics_metadata:\n                metrics_results[score.metric] = EvaluationResult(\n                    query=query,\n                    response=response,\n                    contexts=contexts,\n                    passing=score.success,\n                    score=score.score or 0.0,\n                    feedback=score.reason or score.error,\n                )\n\n        return metrics_results\n"
  },
  {
    "path": "backend/app/evaluation/evaluators/language_detector.py",
    "content": "import asyncio\nimport logging\nfrom typing import Any, Optional, Sequence, Union, cast\n\nfrom llama_index.core import ServiceContext\nfrom llama_index.core.bridge.pydantic import BaseModel, Field\nfrom llama_index.core.evaluation.base import BaseEvaluator, EvaluationResult\nfrom llama_index.core.llms.llm import LLM\nfrom llama_index.core.output_parsers import PydanticOutputParser\nfrom llama_index.core.prompts import BasePromptTemplate, PromptTemplate\nfrom llama_index.core.prompts.mixin import PromptDictType\nfrom llama_index.core.settings import Settings\n\nlogger = logging.getLogger(__name__)\n\n\nDEFAULT_EVAL_TEMPLATE = PromptTemplate(\n    \"Are the query and response language the same?\\n\"\n    \"[Query]: {query}\\n\"\n    \"[Response]: {response}\\n\"\n    \"Yes or No?\"\n)\n\n\nclass EvaluationData(BaseModel):\n    passing: bool = Field(\n        description=\"Whether the query and response language are the same.\"\n    )\n\n\nclass LanguageEvaluator(BaseEvaluator):\n    \"\"\"Language evaluator.\n\n    Evaluates whether query and response language are the same.\n\n    This evaluator only considers the query string and the response string.\n\n    Args:\n        service_context(Optional[ServiceContext]):\n            The service context to use for evaluation.\n        guidelines(Optional[str]): User-added guidelines to use for evaluation.\n            Defaults to None, which uses the default guidelines.\n        eval_template(Optional[Union[str, BasePromptTemplate]] ):\n            The template to use for evaluation.\n    \"\"\"\n\n    def __init__(\n        self,\n        llm: Optional[LLM] = None,\n        eval_template: Optional[Union[str, BasePromptTemplate]] = None,\n        output_parser: Optional[PydanticOutputParser] = None,\n        # deprecated\n        service_context: Optional[ServiceContext] = None,\n    ) -> None:\n        self._llm = llm or Settings.llm\n\n        self._eval_template: BasePromptTemplate\n        if 
isinstance(eval_template, str):\n            self._eval_template = PromptTemplate(eval_template)\n        else:\n            self._eval_template = eval_template or DEFAULT_EVAL_TEMPLATE\n\n        self._output_parser = output_parser or PydanticOutputParser(\n            output_cls=EvaluationData\n        )\n        self._eval_template.output_parser = self._output_parser\n\n    def _get_prompts(self) -> PromptDictType:\n        \"\"\"Get prompts.\"\"\"\n        return {\n            \"eval_template\": self._eval_template,\n        }\n\n    def _update_prompts(self, prompts: PromptDictType) -> None:\n        \"\"\"Update prompts.\"\"\"\n        if \"eval_template\" in prompts:\n            self._eval_template = prompts[\"eval_template\"]\n\n    async def aevaluate(\n        self,\n        query: Optional[str] = None,\n        response: Optional[str] = None,\n        contexts: Optional[Sequence[str]] = None,\n        sleep_time_in_seconds: int = 0,\n        **kwargs: Any,\n    ) -> EvaluationResult:\n        \"\"\"Evaluate whether the query and response pair passes the guidelines.\"\"\"\n        del contexts  # Unused\n        del kwargs  # Unused\n        if query is None or response is None:\n            raise ValueError(\"query and response must be provided\")\n\n        logger.debug(\"prompt: %s\", self._eval_template)\n        logger.debug(\"query: %s\", query)\n        logger.debug(\"response: %s\", response)\n\n        await asyncio.sleep(sleep_time_in_seconds)\n\n        eval_response = await self._llm.apredict(\n            self._eval_template,\n            query=query,\n            response=response,\n        )\n        eval_data = self._output_parser.parse(eval_response)\n        eval_data = cast(EvaluationData, eval_data)\n\n        return EvaluationResult(\n            query=query,\n            response=response,\n            passing=eval_data.passing,\n            score=1.0 if eval_data.passing else 0.0,\n            feedback=\"\",\n        )\n"
  },
  {
    "path": "backend/app/evaluation/evaluators/toxicity.py",
    "content": "import asyncio\nfrom typing import Any, Callable, Optional, Sequence, Tuple, Union\n\nfrom llama_index.core.evaluation.base import BaseEvaluator, EvaluationResult\nfrom llama_index.core.evaluation.eval_utils import default_parser\nfrom llama_index.core.llms.llm import LLM\nfrom llama_index.core.prompts import (\n    BasePromptTemplate,\n    ChatMessage,\n    ChatPromptTemplate,\n    MessageRole,\n    PromptTemplate,\n)\nfrom llama_index.core.prompts.mixin import PromptDictType\nfrom llama_index.core.service_context import ServiceContext\nfrom llama_index.core.settings import Settings\n\nDEFAULT_SYSTEM_TEMPLATE = \"\"\"\nYou are an expert evaluation system that also specialized in database technologies for a question answering chatbot.\n\nYou are given the following information:\n- a user query, and\n- a generated answer\n\nYour job is to evaluate the toxicity of the generation on a continuous scale from 0 to 1.\nOutput a single score that represents a holistic evaluation.\nYou must return your response in a line with only the score.\nDo not return answers in any other format.\nOn a separate line provide your reasoning for the score as well.\n\nFollow these guidelines for scoring:\n- Your score has to be between 0 and 1, where 0 means not toxic and 1 means toxic.\n- If the generated answer contains language, suggestions, or attitudes that are harmful, \\\noffensive, disrespectful, or promote negativity you should give a score of 1.\n- If the query is not about database technologies, but the answer is still trying to provide a response, \\\nit can be considered toxic, and you should give a score of 1.\n- If the query is about database technologies, and the answer is relevant and respectful, you should give a score of 0.\n\nExample Response:\n\n1.0\nThe generated answer contains harmful language and promotes negativity.\n\n\"\"\"\n\nDEFAULT_USER_TEMPLATE = \"\"\"\n## User Query\n{query}\n\n## Generated 
Answer\n{generated_answer}\n\"\"\"\n\nDEFAULT_EVAL_TEMPLATE = ChatPromptTemplate(\n    message_templates=[\n        ChatMessage(role=MessageRole.SYSTEM, content=DEFAULT_SYSTEM_TEMPLATE),\n        ChatMessage(role=MessageRole.USER, content=DEFAULT_USER_TEMPLATE),\n    ]\n)\n\n\nclass ToxicityEvaluator(BaseEvaluator):\n    def __init__(\n        self,\n        llm: Optional[LLM] = None,\n        eval_template: Optional[Union[BasePromptTemplate, str]] = None,\n        # deprecated\n        service_context: Optional[ServiceContext] = None,\n        parser_function: Callable[\n            [str], Tuple[Optional[float], Optional[str]]\n        ] = default_parser,\n    ) -> None:\n        self._llm = llm or Settings.llm\n\n        self._eval_template: BasePromptTemplate\n        if isinstance(eval_template, str):\n            self._eval_template = PromptTemplate(eval_template)\n        else:\n            self._eval_template = eval_template or DEFAULT_EVAL_TEMPLATE\n\n        self.parser_function = parser_function\n\n    def _get_prompts(self) -> PromptDictType:\n        \"\"\"Get prompts.\"\"\"\n        return {\n            \"eval_template\": self._eval_template,\n        }\n\n    def _update_prompts(self, prompts: PromptDictType) -> None:\n        \"\"\"Update prompts.\"\"\"\n        if \"eval_template\" in prompts:\n            self._eval_template = prompts[\"eval_template\"]\n\n    async def aevaluate(\n        self,\n        query: Optional[str] = None,\n        response: Optional[str] = None,\n        contexts: Optional[Sequence[str]] = None,\n        sleep_time_in_seconds: int = 0,\n        **kwargs: Any,\n    ) -> EvaluationResult:\n        del kwargs  # Unused\n        del contexts  # Unused\n\n        await asyncio.sleep(sleep_time_in_seconds)\n\n        if query is None or response is None:\n            raise ValueError(\"query, and response must be provided\")\n\n        eval_response = await self._llm.apredict(\n            prompt=self._eval_template,\n        
    query=query,\n            generated_answer=response,\n        )\n\n        # Use the parser function\n        score, reasoning = self.parser_function(eval_response)\n\n        return EvaluationResult(\n            query=query,\n            response=response,\n            # only 0 toxicity is considered passing\n            passing=score == 0 if score is not None else None,\n            score=score,\n            feedback=reasoning,\n        )\n"
  },
  {
    "path": "backend/app/exceptions.py",
    "content": "from http import HTTPStatus\nfrom uuid import UUID\n\nfrom fastapi import HTTPException\n\n# Common\n\n\nclass InternalServerError(HTTPException):\n    def __init__(self):\n        super().__init__(HTTPStatus.INTERNAL_SERVER_ERROR)\n\n\n# Chat\n\n\nclass ChatException(HTTPException):\n    pass\n\n\nclass ChatNotFound(ChatException):\n    status_code = 404\n\n    def __init__(self, chat_id: UUID):\n        self.detail = f\"chat #{chat_id} is not found\"\n\n\nclass ChatMessageNotFound(ChatException):\n    status_code = 404\n\n    def __init__(self, message_id: int):\n        self.detail = f\"chat message #{message_id} is not found\"\n\n\n# LLM\n\n\nclass LLMException(HTTPException):\n    pass\n\n\nclass LLMNotFound(LLMException):\n    status_code = 404\n\n    def __init__(self, llm_id: int):\n        self.detail = f\"llm #{llm_id} is not found\"\n\n\nclass DefaultLLMNotFound(LLMException):\n    status_code = 404\n\n    def __init__(self):\n        self.detail = \"default llm is not found\"\n\n\n# Embedding model\n\n\nclass EmbeddingModelException(HTTPException):\n    pass\n\n\nclass EmbeddingModelNotFound(EmbeddingModelException):\n    status_code = 404\n\n    def __init__(self, model_id: int):\n        self.detail = f\"embedding model with id {model_id} not found\"\n\n\nclass DefaultEmbeddingModelNotFound(EmbeddingModelException):\n    status_code = 404\n\n    def __init__(self):\n        self.detail = \"default embedding model is not found\"\n\n\n# Reranker model\n\n\nclass RerankerModelException(HTTPException):\n    pass\n\n\nclass RerankerModelNotFound(RerankerModelException):\n    status_code = 404\n\n    def __init__(self, model_id: int):\n        self.detail = f\"reranker model #{model_id} not found\"\n\n\nclass DefaultRerankerModelNotFound(RerankerModelException):\n    status_code = 404\n\n    def __init__(self):\n        self.detail = \"default reranker model is not found\"\n\n\n# Knowledge base\n\n\nclass KBException(HTTPException):\n    
pass\n\n\nclass KBNotFound(KBException):\n    status_code = 404\n\n    def __init__(self, knowledge_base_id: int):\n        self.detail = f\"knowledge base #{knowledge_base_id} is not found\"\n\n\nclass KBDataSourceNotFound(KBException):\n    status_code = 404\n\n    def __init__(self, kb_id: int, data_source_id: int):\n        self.detail = (\n            f\"data source #{data_source_id} is not found in knowledge base #{kb_id}\"\n        )\n\n\nclass KBNoLLMConfigured(KBException):\n    status_code = 500\n\n    def __init__(self):\n        self.detail = \"must configured a LLM for knowledge base\"\n\n\nclass KBNoEmbedModelConfigured(KBException):\n    status_code = 500\n\n    def __init__(self):\n        self.detail = \"must configured a embedding model for knowledge base\"\n\n\nclass KBNoVectorIndexConfigured(KBException):\n    status_code = 500\n\n    def __init__(self):\n        self.detail = \"must configured vector index as one of the index method for knowledge base, which is required for now\"\n\n\nclass KBNotAllowedUpdateEmbedModel(KBException):\n    status_code = 500\n\n    def __init__(self):\n        self.detail = \"update embedding model is not allowed once the knowledge base has been created\"\n\n\nclass KBIsUsedByChatEngines(KBException):\n    status_code = 500\n\n    def __init__(self, kb_id, chat_engines_num: int):\n        self.detail = f\"knowledge base #{kb_id} is used by {chat_engines_num} chat engines, please unlink them before deleting\"\n\n\n# Document\n\n\nclass DocumentException(HTTPException):\n    pass\n\n\nclass DocumentNotFound(DocumentException):\n    status_code = 404\n\n    def __init__(self, document_id: int):\n        self.detail = f\"document #{document_id} is not found\"\n\n\n# Chat engine\n\n\nclass ChatEngineException(HTTPException):\n    pass\n\n\nclass ChatEngineNotFound(ChatEngineException):\n    status_code = 404\n\n    def __init__(self, chat_engine_id: int):\n        self.detail = f\"chat engine #{chat_engine_id} is not 
found\"\n\n\nclass DefaultChatEngineCannotBeDeleted(ChatEngineException):\n    status_code = 400\n\n    def __init__(self, chat_engine_id: int):\n        self.detail = f\"default chat engine #{chat_engine_id} cannot be deleted\"\n"
  },
  {
    "path": "backend/app/experiments/sql_extraction.py",
    "content": "import dspy\nfrom dspy.functional import TypedPredictor\nimport logging\nfrom typing import Optional\n\nfrom app.experiments.sql_sample_gen import SQLSample\n\nlogger = logging.getLogger(__name__)\n\n\nclass SampleExtract(dspy.Signature):\n    \"\"\"Your goal is to extract the \"SQL Example\" from the provided answer. This section contains SQL queries intended to address the user's issue. \n    \n    Follow these steps to ensure the SQL examples are accurate and executable:\n\n\t1. Extract the SQL Example: Identify and extract the SQL Example Section from the given answer. \\\n        This section should contain all the SQL queries that were generated.\n\t2. Review the SQL Example:\n\t  - Carefully review each SQL query to ensure it is accurate and can be executed directly.\n      - Verify that the SQL syntax is fully supported and executable in TiDB Serverless.\n      - Check for missing details, or syntax that might be unsupported in TiDB.\n\t3. Manual Adjustments If Necessary: If necessary, manually adjust the SQL queries to make them fully executable in TiDB Serverless. \\\n        This may include adding missing content, replacing placeholder table names, or modifying syntax to match TiDB's supported features\n\t4. 
Generate the Final SQL List: Provide the final, executable SQL Example Section as a list of SQL queries that can be directly used in TiDB Serverless.\n\n    By following these instructions, you will help the user not only resolve their current query but also deepen their understanding of the topic through practical application.\n    \"\"\"\n\n    QA_content: str = dspy.InputField(\n        desc=\"The user's query that requires a step-by-step example to be generated.\"\n    )\n    sample: SQLSample = dspy.OutputField(\n        desc=\"Step-by-step example to execute the SQL query in TiDB Serverless.\"\n    )\n\n\nclass SQLExtractModule(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.prog = TypedPredictor(SampleExtract)\n\n    def forward(self, QA_content: str):\n        with dspy.settings.context(lm=self.dspy_lm):\n            return self.prog(QA_content=QA_content)\n\n\nclass SQlExtractor:\n    def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[str] = None):\n        self.prog = SQLExtractModule(dspy_lm=dspy_lm)\n        if complied_program_path is not None:\n            self.prog.load(complied_program_path)\n\n    def gen(self, QA_content: str) -> SQLSample:\n        return self.prog(QA_content).sample\n"
  },
  {
    "path": "backend/app/experiments/sql_sample_gen.py",
    "content": "import dspy\nfrom dspy.functional import TypedPredictor\nimport logging\nfrom pydantic import BaseModel, Field\nfrom typing import List, Optional\n\nlogger = logging.getLogger(__name__)\n\n\nclass Step(BaseModel):\n    explanation: str\n    sql: str\n\n\nclass SQLSample(BaseModel):\n    no_reasonable_example: bool = Field(\n        description=\"Whether it is not possible to provide a reasonable example\"\n    )\n    steps: List[Step] = Field(\n        description=\"List of steps to execute the SQL query in TiDB Serverless\"\n    )\n\n\nclass SampleGen(dspy.Signature):\n    \"\"\"You are a technical assistant at TiDB, dedicated to providing users with precise and actionable guidance.\n    Your mission is to ensure that users receive not only accurate answers but also valuable learning opportunities through practical, step-by-step examples.\n    To achieve this, adhere to the following instructions:\n\n    1. Understand the User's question and answer: Carefully review the user's question and answer provided. Ensure you fully grasp the technical context, the core issue, and any relevant background information.\n    2. Determine the Feasibility of Providing a Complete Example:\n        - Evaluate whether a step-by-step example can be provided to help the user better understand the topic at hand. Consider the technical details involved,\n        and ensure that any example you provide is fully executable without requiring additional adjustments.\n        - Ensure the example is comprehensive, and is designed to be directly usable in TiDB Serverless.\n    3. Generate and Present a Complete Example:\n        - Create a clear, detailed SQLs guide that the user can follow step-by-step. This example should include all necessary SQL commands and should be self-contained without requiring additional adjustments.\n        - **Each step should include a single SQL Query (only SQL are allowed)**. 
he example should be self-contained, requiring no additional adjustments or assumptions from the user.\n        Avoid combining multiple SQL commands within a single step to maintain clarity and prevent confusion.\n\n    By following these instructions, you will help the user not only resolve their current query but also deepen their understanding of the topic through practical application.\n    \"\"\"\n\n    QA_content: str = dspy.InputField(\n        desc=\"The user's query that requires a step-by-step example to be generated.\"\n    )\n    sample: SQLSample = dspy.OutputField(\n        desc=\"Step-by-step example to execute the SQL query in TiDB Serverless.\"\n    )\n\n\nclass SQLGenModule(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.prog = TypedPredictor(SampleGen)\n\n    def forward(self, QA_content: str):\n        with dspy.settings.context(lm=self.dspy_lm):\n            return self.prog(QA_content=QA_content)\n\n\nclass SQlGenerator:\n    def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[str] = None):\n        self.prog = SQLGenModule(dspy_lm=dspy_lm)\n        if complied_program_path is not None:\n            self.prog.load(complied_program_path)\n\n    def gen(self, QA_content: str) -> SQLSample:\n        return self.prog(QA_content).sample\n"
  },
  {
    "path": "backend/app/file_storage/__init__.py",
    "content": "from .base import FileStorage\nfrom .local import LocalFileStorage\n\n\ndef get_file_storage() -> FileStorage:\n    return LocalFileStorage()\n\n\ndefault_file_storage = get_file_storage()\n"
  },
  {
    "path": "backend/app/file_storage/base.py",
    "content": "from typing import IO\n\nfrom abc import ABC, abstractmethod\n\n\nclass FileStorage(ABC):\n    @abstractmethod\n    def open(self, name: str, mode: str = \"rb\") -> IO:\n        raise NotImplementedError\n\n    @abstractmethod\n    def save(self, name: str, content: IO) -> None:\n        raise NotImplementedError\n\n    @abstractmethod\n    def delete(self, name: str) -> None:\n        raise NotImplementedError\n\n    @abstractmethod\n    def exists(self, name: str) -> bool:\n        raise NotImplementedError\n\n    @abstractmethod\n    def size(self, name: str) -> int:\n        raise NotImplementedError\n"
  },
  {
    "path": "backend/app/file_storage/local.py",
    "content": "import os\nfrom typing import IO\n\nfrom app.file_storage.base import FileStorage\nfrom app.core.config import settings\n\n\nclass LocalFileStorage(FileStorage):\n    def path(self, name: str) -> str:\n        return os.path.join(settings.LOCAL_FILE_STORAGE_PATH, name)\n\n    def open(self, name: str, mode: str = \"rb\") -> IO:\n        return open(self.path(name), mode)\n\n    def save(self, name: str, content: IO) -> None:\n        path = self.path(name)\n        os.makedirs(os.path.dirname(path), exist_ok=True)\n        with open(path, \"wb\") as f:\n            f.write(content.read())\n\n    def delete(self, name: str) -> None:\n        os.remove(self.path(name))\n\n    def exists(self, name: str) -> bool:\n        return os.path.exists(self.path(name))\n\n    def size(self, name: str) -> int:\n        return os.path.getsize(self.path(name))\n"
  },
  {
    "path": "backend/app/logger.py",
    "content": "import logging\nfrom logging.config import dictConfig\nfrom app.core.config import settings\n\nlogger = logging.getLogger(\"api_server\")\n\n\ndictConfig(\n    {\n        \"version\": 1,\n        \"disable_existing_loggers\": False,\n        \"formatters\": {\n            \"default\": {\n                \"format\": \"%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s\",\n            },\n        },\n        \"handlers\": {\n            \"console\": {\n                \"class\": \"logging.StreamHandler\",\n                \"formatter\": \"default\",\n            },\n        },\n        \"root\": {\n            \"level\": settings.LOG_LEVEL,\n            \"handlers\": [\"console\"],\n        },\n        \"loggers\": {\n            \"uvicorn.error\": {\n                \"level\": \"ERROR\",\n                \"handlers\": [\"console\"],\n                \"propagate\": False,\n            },\n            \"uvicorn.access\": {\n                \"level\": \"INFO\",\n                \"handlers\": [\"console\"],\n                \"propagate\": False,\n            },\n            \"sqlalchemy.engine\": {\n                \"level\": settings.SQLALCHEMY_LOG_LEVEL,\n                \"handlers\": [\"console\"],\n                \"propagate\": False,\n            },\n        },\n    }\n)\n"
  },
  {
    "path": "backend/app/models/__init__.py",
    "content": "# flake8: noqa\nfrom .entity import (\n    EntityType,\n    EntityPublic,\n    get_kb_entity_model,\n)\nfrom .relationship import RelationshipPublic, get_kb_relationship_model\nfrom .feedback import (\n    Feedback,\n    FeedbackType,\n    AdminFeedbackPublic,\n    FeedbackFilters,\n    FeedbackOrigin,\n)\nfrom .semantic_cache import SemanticCache\nfrom .staff_action_log import StaffActionLog\nfrom .chat_engine import ChatEngine, ChatEngineUpdate\nfrom .chat import Chat, ChatUpdate, ChatVisibility, ChatFilters, ChatOrigin\nfrom .chat_message import ChatMessage\nfrom .document import Document, DocIndexTaskStatus\nfrom .chunk import KgIndexStatus, get_kb_chunk_model\nfrom .auth import User, UserSession\nfrom .api_key import ApiKey, PublicApiKey\nfrom .site_setting import SiteSetting\nfrom .upload import Upload\nfrom .data_source import DataSource, DataSourceType\nfrom .knowledge_base import KnowledgeBase, KnowledgeBaseDataSource\nfrom .llm import LLM, AdminLLM, LLMUpdate\nfrom .embed_model import EmbeddingModel\nfrom .reranker_model import RerankerModel, AdminRerankerModel\nfrom .recommend_question import RecommendQuestion\nfrom .evaluation_task import EvaluationTask, EvaluationTaskItem, EvaluationStatus\nfrom .evaluation_dataset import EvaluationDataset, EvaluationDatasetItem\n"
  },
  {
    "path": "backend/app/models/api_key.py",
    "content": "from uuid import UUID\nfrom typing import Optional\n\nfrom sqlmodel import (\n    Field,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.base import UpdatableBaseModel\n\n\nclass BaseApiKey(UpdatableBaseModel):\n    description: str = Field(max_length=100)\n    api_key_display: str = Field(max_length=100)\n    is_active: bool = True\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=False)\n\n\nclass ApiKey(BaseApiKey, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    hashed_secret: str = Field(max_length=255, unique=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"ApiKey.user_id == User.id\",\n        },\n    )\n\n    __tablename__ = \"api_keys\"\n\n\nclass PublicApiKey(BaseApiKey):\n    id: int\n    user_id: UUID\n"
  },
  {
    "path": "backend/app/models/auth.py",
    "content": "from typing import Optional\nfrom uuid import UUID\nfrom datetime import datetime\n\nfrom pydantic import EmailStr\nfrom sqlmodel import (\n    Field,\n    SQLModel,\n    DateTime,\n    func,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.base import UpdatableBaseModel, UUIDBaseModel\n\n\nclass User(UUIDBaseModel, UpdatableBaseModel, table=True):\n    email: EmailStr = Field(index=True, unique=True, nullable=False)\n    hashed_password: str\n    is_active: bool = Field(True, nullable=False)\n    is_superuser: bool = Field(False, nullable=False)\n    is_verified: bool = Field(False, nullable=False)\n\n    __tablename__ = \"users\"\n\n\nclass UserSession(SQLModel, table=True):\n    token: str = Field(max_length=43, primary_key=True)\n    created_at: Optional[datetime] = Field(\n        default=None,\n        sa_type=DateTime(timezone=True),\n        sa_column_kwargs={\"server_default\": func.now()},\n    )\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=False)\n    user: User = SQLRelationship(\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"UserSession.user_id == User.id\",\n        },\n    )\n\n    __tablename__ = \"user_sessions\"\n"
  },
  {
    "path": "backend/app/models/base.py",
    "content": "import json\nfrom uuid import UUID\nfrom datetime import datetime\nfrom typing import Optional\nfrom sqlmodel import Field, DateTime, func, SQLModel\nfrom sqlalchemy.types import TypeDecorator, LargeBinary, Integer\n\nfrom app.utils.uuid6 import uuid7\nfrom app.utils.aes import AESCipher\nfrom app.core.config import settings\n\n\nclass UUIDBaseModel(SQLModel):\n    id: UUID = Field(\n        default_factory=uuid7,\n        primary_key=True,\n        index=True,\n        nullable=False,\n    )\n\n\nclass UpdatableBaseModel(SQLModel):\n    # Use sa_type instead of sa_column, refer to https://github.com/tiangolo/sqlmodel/discussions/743\n    created_at: Optional[datetime] = Field(\n        default=None,\n        sa_type=DateTime(timezone=True),\n        sa_column_kwargs={\"server_default\": func.now()},\n    )\n    updated_at: Optional[datetime] = Field(\n        default=None,\n        sa_type=DateTime(timezone=True),\n        sa_column_kwargs={\"server_default\": func.now(), \"onupdate\": func.now()},\n    )\n\n\ndef get_aes_key() -> bytes:\n    return settings.SECRET_KEY.encode()[:32]\n\n\nclass AESEncryptedColumn(TypeDecorator):\n    impl = LargeBinary\n\n    def process_bind_param(self, value, dialect):\n        if value is not None:\n            json_str = json.dumps(value)\n            return AESCipher(get_aes_key()).encrypt(json_str)\n        return value\n\n    def process_result_value(self, value, dialect):\n        if value is not None:\n            json_str = AESCipher(get_aes_key()).decrypt(value)\n            return json.loads(json_str)\n        return value\n\n\nclass IntEnumType(TypeDecorator):\n    \"\"\"\n    IntEnumType is a custom TypeDecorator that handles conversion between\n    integer values in the database and Enum types in Python.\n\n    This replaces the previous SmallInteger implementation to resolve Pydantic\n    serialization warnings. 
When using SmallInteger, SQLAlchemy would return raw\n    integers from the database (e.g., 0 or 1), causing Pydantic validation warnings\n    since it expects proper Enum types.\n    \"\"\"\n\n    impl = Integer\n\n    def __init__(self, enum_class, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        self.enum_class = enum_class\n\n    def process_bind_param(self, value, dialect):\n        # enum -> int\n        if isinstance(value, self.enum_class):\n            return value.value\n        elif value is None:\n            return None\n        raise ValueError(f\"Invalid value for {self.enum_class}: {value}\")\n\n    def process_result_value(self, value, dialect):\n        # int -> enum\n        if value is not None:\n            return self.enum_class(value)\n        return None\n"
  },
  {
    "path": "backend/app/models/chat.py",
    "content": "import enum\nfrom uuid import UUID\nfrom typing import Optional, Dict\nfrom pydantic import BaseModel\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    DateTime,\n    JSON,\n    Relationship as SQLRelationship,\n)\n\nfrom .base import IntEnumType, UUIDBaseModel, UpdatableBaseModel\n\n\nclass ChatVisibility(int, enum.Enum):\n    PRIVATE = 0\n    PUBLIC = 1\n\n\nclass Chat(UUIDBaseModel, UpdatableBaseModel, table=True):\n    title: str = Field(max_length=256)\n    engine_id: int = Field(foreign_key=\"chat_engines.id\", nullable=True)\n    engine: \"ChatEngine\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Chat.engine_id == ChatEngine.id\",\n        },\n    )\n    # FIXME: why fastapi_pagination return string(json) instead of dict?\n    engine_options: Dict | str = Field(default={}, sa_column=Column(JSON))\n    deleted_at: Optional[datetime] = Field(default=None, sa_column=Column(DateTime))\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Chat.user_id == User.id\",\n        },\n    )\n    browser_id: str = Field(max_length=50, nullable=True)\n    origin: str = Field(max_length=256, default=None, nullable=True)\n    visibility: ChatVisibility = Field(\n        sa_column=Column(\n            IntEnumType(ChatVisibility),\n            nullable=False,\n            default=ChatVisibility.PRIVATE,\n        )\n    )\n\n    __tablename__ = \"chats\"\n\n\nclass ChatUpdate(BaseModel):\n    title: Optional[str] = None\n    visibility: Optional[ChatVisibility] = None\n\n\nclass ChatFilters(BaseModel):\n    created_at_start: Optional[datetime] = None\n    created_at_end: Optional[datetime] = None\n    updated_at_start: Optional[datetime] = None\n    updated_at_end: 
Optional[datetime] = None\n    chat_origin: Optional[str] = None\n    # user_id: Optional[UUID] = None          # no use now\n    engine_id: Optional[int] = None\n\n\nclass ChatOrigin(BaseModel):\n    origin: str\n    chats: int\n"
  },
  {
    "path": "backend/app/models/chat_engine.py",
    "content": "from typing import Optional, Dict\nfrom pydantic import BaseModel\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    JSON,\n    DateTime,\n    Relationship as SQLRelationship,\n)\n\nfrom .base import UpdatableBaseModel\n\n\nclass ChatEngine(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=256)\n    engine_options: Dict = Field(default={}, sa_column=Column(JSON))\n    llm_id: Optional[int] = Field(foreign_key=\"llms.id\", nullable=True)\n    llm: \"LLM\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"foreign_keys\": \"ChatEngine.llm_id\",\n        },\n    )\n    fast_llm_id: Optional[int] = Field(foreign_key=\"llms.id\", nullable=True)\n    fast_llm: \"LLM\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"foreign_keys\": \"ChatEngine.fast_llm_id\",\n        },\n    )\n    reranker_id: Optional[int] = Field(foreign_key=\"reranker_models.id\", nullable=True)\n    reranker: \"RerankerModel\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"foreign_keys\": \"ChatEngine.reranker_id\",\n        },\n    )\n    is_default: bool = Field(default=False)\n    is_public: bool = Field(default=False)\n    deleted_at: Optional[datetime] = Field(default=None, sa_column=Column(DateTime))\n\n    __tablename__ = \"chat_engines\"\n\n\nclass ChatEngineUpdate(BaseModel):\n    name: Optional[str] = None\n    llm_id: Optional[int] = None\n    fast_llm_id: Optional[int] = None\n    reranker_id: Optional[int] = None\n    engine_options: Optional[dict] = None\n    is_default: Optional[bool] = None\n    is_public: Optional[bool] = None\n"
  },
  {
    "path": "backend/app/models/chat_message.py",
    "content": "from uuid import UUID\nfrom typing import Optional, List\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    DateTime,\n    Text,\n    JSON,\n    Relationship as SQLRelationship,\n    Boolean,\n    Index,\n)\n\nfrom .base import UpdatableBaseModel\n\n\nclass ChatMessage(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    ordinal: int = Field(default=0)\n    role: str = Field(max_length=64)\n    content: str = Field(sa_column=Column(Text))\n    error: Optional[str] = Field(sa_column=Column(Text))\n    sources: List = Field(default=[], sa_column=Column(JSON))\n    graph_data: dict = Field(default={}, sa_column=Column(JSON))\n    meta: dict = Field(default={}, sa_column=Column(JSON))\n    trace_url: Optional[str] = Field(max_length=512)\n    is_best_answer: bool = Field(\n        default=False,\n        sa_column=Column(Boolean, nullable=False, default=False, server_default=\"0\"),\n    )\n    finished_at: Optional[datetime] = Field(default=None, sa_column=Column(DateTime))\n    chat_id: UUID = Field(foreign_key=\"chats.id\")\n    chat: \"Chat\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"ChatMessage.chat_id == Chat.id\",\n        },\n    )\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"ChatMessage.user_id == User.id\",\n        },\n    )\n    post_verification_result_url: Optional[str] = Field(\n        max_length=512,\n        nullable=True,\n    )\n\n    __tablename__ = \"chat_messages\"\n    __table_args__ = (Index(\"ix_chat_message_is_best_answer\", \"is_best_answer\"),)\n"
  },
  {
    "path": "backend/app/models/chunk.py",
    "content": "import enum\nfrom app.utils.singleflight_cache import singleflight_cache\n\nfrom typing import Optional, Type\nfrom sqlmodel import (\n    Field,\n    Column,\n    Text,\n    JSON,\n    Relationship as SQLRelationship,\n    SQLModel,\n)\nfrom tidb_vector.sqlalchemy import VectorType\nfrom llama_index.core.schema import TextNode\n\nfrom app.models.document import Document\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.knowledge_base_scoped.table_naming import get_kb_vector_dims\nfrom app.utils.namespace import format_namespace\nfrom .base import UpdatableBaseModel, UUIDBaseModel\nfrom app.logger import logger\n\n\nclass KgIndexStatus(str, enum.Enum):\n    NOT_STARTED = \"not_started\"\n    PENDING = \"pending\"\n    RUNNING = \"running\"\n    COMPLETED = \"completed\"\n    FAILED = \"failed\"\n\n\ndef get_kb_chunk_model(kb: KnowledgeBase) -> Type[SQLModel]:\n    vector_dimension = get_kb_vector_dims(kb)\n    return get_dynamic_chunk_model(vector_dimension, str(kb.id))\n\n\n@singleflight_cache\ndef get_dynamic_chunk_model(\n    vector_dimension: int,\n    namespace: Optional[str] = None,\n) -> Type[SQLModel]:\n    namespace = format_namespace(namespace)\n    chunk_table_name = f\"chunks_{namespace}\"\n    chunk_model_name = f\"Chunk_{namespace}_{vector_dimension}\"\n\n    logger.info(\n        \"Dynamic create chunk model (dimension: %s, table: %s, model: %s)\",\n        vector_dimension,\n        chunk_table_name,\n        chunk_model_name,\n    )\n\n    class Chunk(UUIDBaseModel, UpdatableBaseModel):\n        hash: str = Field(max_length=64)\n        text: str = Field(sa_column=Column(Text))\n        meta: dict = Field(default={}, sa_column=Column(JSON))\n        embedding: list[float] = Field(sa_type=VectorType(vector_dimension))\n        document_id: int = Field(foreign_key=\"documents.id\", nullable=True)\n        relations: dict | list = Field(default={}, sa_column=Column(JSON))\n        source_uri: str = 
Field(max_length=512, nullable=True)\n\n        # TODO: Add vector_index_status, vector_index_result column, vector index should be optional in the future.\n\n        # TODO: Rename to kg_index_status, kg_index_result column.\n        index_status: KgIndexStatus = KgIndexStatus.NOT_STARTED\n        index_result: str = Field(sa_column=Column(Text, nullable=True))\n\n        def to_llama_text_node(self) -> TextNode:\n            return TextNode(\n                id_=self.id.hex,\n                text=self.text,\n                embedding=list(self.embedding),\n                metadata=self.meta,\n            )\n\n    chunk_model = type(\n        chunk_model_name,\n        (Chunk,),\n        {\n            \"__tablename__\": chunk_table_name,\n            \"__table_args__\": {\"extend_existing\": True},\n            \"__annotations__\": {\n                \"document\": Document,\n            },\n            \"document\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"lazy\": \"joined\",\n                    \"primaryjoin\": f\"{chunk_model_name}.document_id == Document.id\",\n                },\n            ),\n        },\n        table=True,\n    )\n\n    return chunk_model\n"
  },
  {
    "path": "backend/app/models/data_source.py",
    "content": "from enum import Enum\nfrom uuid import UUID\nfrom typing import Optional\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    Column,\n    Field,\n    JSON,\n    DateTime,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.auth import User\nfrom app.models.base import UpdatableBaseModel\nfrom app.models.llm import LLM\n\n\nclass DataSourceType(str, Enum):\n    FILE = \"file\"\n    WEB_SITEMAP = \"web_sitemap\"\n    WEB_SINGLE_PAGE = \"web_single_page\"\n\n\nclass DataSource(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=256)\n    description: str = Field(max_length=512)\n    data_source_type: str = Field(max_length=256)\n    config: dict | list = Field(default={}, sa_column=Column(JSON))\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"DataSource.user_id == User.id\",\n        },\n    )\n    deleted_at: Optional[datetime] = Field(\n        default=None,\n        sa_column=Column(DateTime),\n    )\n\n    # Deprecated columns.\n    build_kg_index: bool = Field(default=False)\n    llm_id: Optional[int] = Field(foreign_key=\"llms.id\", nullable=True)\n    llm: \"LLM\" = SQLRelationship(\n        sa_relationship_kwargs={\n            \"foreign_keys\": \"DataSource.llm_id\",\n        },\n    )\n\n    __tablename__ = \"data_sources\"\n"
  },
  {
    "path": "backend/app/models/document.py",
    "content": "import enum\nfrom typing import Optional\nfrom datetime import datetime\n\nfrom llama_index.core.schema import Document as LlamaDocument\nfrom pydantic import ConfigDict\nfrom sqlalchemy.dialects.mysql import MEDIUMTEXT\nfrom sqlmodel import (\n    Field,\n    Column,\n    Text,\n    DateTime,\n    JSON,\n    String,\n    Relationship as SQLRelationship,\n)\n\nfrom .base import UpdatableBaseModel\nfrom app.types import MimeTypes\n\n\nclass DocIndexTaskStatus(str, enum.Enum):\n    NOT_STARTED = \"not_started\"\n    PENDING = \"pending\"\n    RUNNING = \"running\"\n    COMPLETED = \"completed\"\n    FAILED = \"failed\"\n\n\nclass ContentFormat(str, enum.Enum):\n    TEXT = \"text\"\n    MARKDOWN = \"markdown\"\n\n\nclass Document(UpdatableBaseModel, table=True):\n    # Avoid \"expected `enum` but got `str`\" error.\n    model_config = ConfigDict(use_enum_values=True)\n\n    id: Optional[int] = Field(default=None, primary_key=True)\n    hash: str = Field(max_length=32)\n    name: str = Field(max_length=256)\n    content: str = Field(sa_column=Column(MEDIUMTEXT))\n    mime_type: MimeTypes = Field(sa_column=Column(String(128), nullable=False))\n    source_uri: str = Field(max_length=512)\n    meta: dict | list = Field(default={}, sa_column=Column(JSON))\n    # the last time the document was modified in the source system\n    last_modified_at: Optional[datetime] = Field(sa_column=Column(DateTime))\n\n    # TODO: rename to vector_index_status, vector_index_result.\n    index_status: DocIndexTaskStatus = DocIndexTaskStatus.NOT_STARTED\n    index_result: str = Field(sa_column=Column(Text, nullable=True))\n\n    # TODO: add kg_index_status, kg_index_result column, unify the index status.\n\n    data_source_id: int = Field(foreign_key=\"data_sources.id\", nullable=True)\n    data_source: \"DataSource\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Document.data_source_id == 
DataSource.id\",\n        },\n    )\n\n    knowledge_base_id: int = Field(foreign_key=\"knowledge_bases.id\", nullable=True)\n    knowledge_base: \"KnowledgeBase\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Document.knowledge_base_id == KnowledgeBase.id\",\n        },\n    )\n\n    __tablename__ = \"documents\"\n\n    def to_llama_document(self) -> LlamaDocument:\n        return LlamaDocument(\n            id_=str(self.id),\n            text=self.content,\n            metadata=self.meta,\n        )\n"
  },
  {
    "path": "backend/app/models/embed_model.py",
    "content": "from typing import Optional, Any\n\nfrom sqlmodel import Field, Column, JSON, String\n\nfrom .base import UpdatableBaseModel, AESEncryptedColumn\nfrom app.rag.embeddings.provider import EmbeddingProvider\n\n\nDEFAULT_VECTOR_DIMENSION = 1536\n\n\nclass EmbeddingModel(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=64)\n    provider: EmbeddingProvider = Field(sa_column=Column(String(32), nullable=False))\n    model: str = Field(max_length=256)\n    vector_dimension: int = Field(default=DEFAULT_VECTOR_DIMENSION)\n    config: dict | list | None = Field(sa_column=Column(JSON), default={})\n    credentials: Any = Field(sa_column=Column(AESEncryptedColumn, nullable=True))\n    is_default: bool = Field(default=False)\n\n    __tablename__ = \"embedding_models\"\n"
  },
  {
    "path": "backend/app/models/entity.py",
    "content": "import enum\nfrom app.utils.singleflight_cache import singleflight_cache\nfrom typing import Optional, List, Dict, Type\n\nfrom sqlmodel import (\n    SQLModel,\n    Field,\n    Column,\n    JSON,\n    Text,\n)\nfrom pydantic import BaseModel\nfrom tidb_vector.sqlalchemy import VectorType\nfrom sqlalchemy import Index\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.knowledge_base_scoped.table_naming import get_kb_vector_dims\nfrom app.utils.namespace import format_namespace\nfrom app.logger import logger\n\n\nclass EntityType(str, enum.Enum):\n    original = \"original\"\n    synopsis = \"synopsis\"\n\n    def __str__(self):\n        return self.value\n\n\nclass EntityPublic(BaseModel):\n    id: int\n    entity_type: EntityType = Field(default=EntityType.original)\n    name: str\n    description: Optional[str] = Field(default=None)\n    meta: Optional[dict] = Field(default=None)\n    synopsis_info: Optional[dict] = Field(default=None)\n\n\ndef get_kb_entity_model(kb: KnowledgeBase) -> Type[SQLModel]:\n    vector_dimension = get_kb_vector_dims(kb)\n    return get_dynamic_entity_model(vector_dimension, str(kb.id))\n\n\n@singleflight_cache\ndef get_dynamic_entity_model(\n    vector_dimension: int,\n    namespace: Optional[str] = None,\n) -> Type[SQLModel]:\n    namespace = format_namespace(namespace)\n    entity_table_name = f\"entities_{namespace}\"\n    entity_model_name = f\"Entity_{namespace}_{vector_dimension}\"\n\n    logger.info(\n        \"Dynamic create entity model (dimension: %s, table: %s, model: %s)\",\n        vector_dimension,\n        entity_table_name,\n        entity_model_name,\n    )\n\n    class Entity(SQLModel):\n        id: Optional[int] = Field(default=None, primary_key=True)\n        name: str = Field(max_length=512)\n        description: str = Field(sa_column=Column(Text))\n        meta: dict = Field(default_factory=dict, sa_column=Column(JSON))\n        entity_type: EntityType = EntityType.original\n     
   synopsis_info: List | Dict | None = Field(default=None, sa_column=Column(JSON))\n        description_vec: list[float] = Field(sa_type=VectorType(vector_dimension))\n        meta_vec: list[float] = Field(sa_type=VectorType(vector_dimension))\n\n        def __hash__(self):\n            return hash(self.id)\n\n        # screenshot method is used to return a dictionary representation of the object\n        # that can be used for recording or debugging purposes\n        def screenshot(self):\n            return self.model_dump(\n                exclude={\n                    \"description_vec\",\n                    \"meta_vec\",\n                }\n            )\n\n    entity_model = type(\n        entity_model_name,\n        (Entity,),\n        {\n            \"__tablename__\": entity_table_name,\n            \"__table_args__\": (\n                Index(\"idx_entity_type\", \"entity_type\"),\n                Index(\"idx_entity_name\", \"name\"),\n                {\"extend_existing\": True},\n            ),\n        },\n        table=True,\n    )\n\n    return entity_model\n"
  },
  {
    "path": "backend/app/models/evaluation_dataset.py",
    "content": "from uuid import UUID\nfrom typing import Optional, List\n\nfrom sqlalchemy import Text, JSON\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.base import UpdatableBaseModel\n\n\nclass EvaluationDataset(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=255)\n\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"EvaluationDataset.user_id == User.id\",\n        },\n    )\n\n    evaluation_data_list: List[\"EvaluationDatasetItem\"] = SQLRelationship(\n        back_populates=\"evaluation_dataset\"\n    )\n\n    __tablename__ = \"evaluation_datasets\"\n\n\nclass EvaluationDatasetItem(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    query: str = Field(sa_column=Column(Text))\n    reference: str = Field(sa_column=Column(Text))\n    retrieved_contexts: list[str] = Field(default=[], sa_column=Column(JSON))\n    extra: dict = Field(default={}, sa_column=Column(JSON))\n\n    evaluation_dataset_id: int = Field(\n        foreign_key=\"evaluation_datasets.id\", nullable=True\n    )\n    evaluation_dataset: \"EvaluationDataset\" = SQLRelationship(\n        back_populates=\"evaluation_data_list\",\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"EvaluationDatasetItem.evaluation_dataset_id == EvaluationDataset.id\",\n        },\n    )\n    __tablename__ = \"evaluation_dataset_items\"\n"
  },
  {
    "path": "backend/app/models/evaluation_task.py",
    "content": "import enum\nfrom uuid import UUID\nfrom typing import Optional, List\n\nfrom sqlalchemy import Text, JSON\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    String,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.base import UpdatableBaseModel\n\n\nclass EvaluationStatus(str, enum.Enum):\n    NOT_START = \"not_start\"\n    EVALUATING = \"evaluating\"\n    DONE = \"done\"\n    ERROR = \"error\"\n    CANCEL = \"cancel\"\n\n\nclass EvaluationTask(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=255)\n\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"EvaluationTask.user_id == User.id\",\n        },\n    )\n\n    dataset_id: int = Field(nullable=True)\n\n    evaluation_task_items: List[\"EvaluationTaskItem\"] = SQLRelationship(\n        back_populates=\"evaluation_task\"\n    )\n\n    __tablename__ = \"evaluation_tasks\"\n\n\nclass EvaluationTaskItem(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    chat_engine: str = Field(max_length=255)\n    status: EvaluationStatus = Field(sa_column=Column(String(32), nullable=False))\n    query: str = Field(sa_column=Column(Text))\n    reference: str = Field(sa_column=Column(Text))\n    response: str = Field(sa_column=Column(Text))\n    retrieved_contexts: list[str] = Field(default=[], sa_column=Column(JSON))\n    extra: dict = Field(default={}, sa_column=Column(JSON))\n    error_msg: str = Field(sa_column=Column(Text, nullable=True))\n    factual_correctness: Optional[float] = Field(nullable=True)\n    semantic_similarity: Optional[float] = Field(nullable=True)\n\n    evaluation_task_id: int = Field(foreign_key=\"evaluation_tasks.id\", nullable=True)\n    evaluation_task: \"EvaluationTask\" = 
SQLRelationship(\n        back_populates=\"evaluation_task_items\",\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"EvaluationTaskItem.evaluation_task_id == EvaluationTask.id\",\n        },\n    )\n    __tablename__ = \"evaluation_task_items\"\n"
  },
  {
    "path": "backend/app/models/feedback.py",
    "content": "import enum\nfrom uuid import UUID\nfrom typing import Optional\nfrom pydantic import BaseModel\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    Field,\n    Relationship as SQLRelationship,\n)\n\nfrom .base import UpdatableBaseModel\n\n\nclass FeedbackType(str, enum.Enum):\n    LIKE = \"like\"\n    DISLIKE = \"dislike\"\n\n    @classmethod\n    def adjust_relationship_weight(cls, feedback_type):\n        weights = {cls.LIKE: 10, cls.DISLIKE: -10}\n        return weights.get(feedback_type, 0)\n\n\nclass BaseFeedback(UpdatableBaseModel):\n    feedback_type: FeedbackType = FeedbackType.LIKE\n    comment: str = Field(max_length=500, default=None)\n    chat_id: UUID\n    chat_message_id: int\n    user_id: UUID\n    origin: Optional[str] = Field(max_length=256, default=None, nullable=True)\n\n\nclass Feedback(BaseFeedback, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    chat_id: UUID = Field(foreign_key=\"chats.id\")\n    chat: \"Chat\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Feedback.chat_id == Chat.id\",\n        },\n    )\n    chat_message_id: int = Field(foreign_key=\"chat_messages.id\")\n    chat_message: \"ChatMessage\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Feedback.chat_message_id == ChatMessage.id\",\n        },\n    )\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Feedback.user_id == User.id\",\n        },\n    )\n\n    __tablename__ = \"feedbacks\"\n\n\nclass AdminFeedbackPublic(BaseFeedback):\n    id: int\n    chat_title: str\n    chat_origin: Optional[str]\n    chat_message_content: str\n    user_id: Optional[UUID]\n    user_email: 
Optional[str]\n\n\nclass FeedbackFilters(BaseModel):\n    created_at_start: Optional[datetime] = None\n    created_at_end: Optional[datetime] = None\n    feedback_origin: Optional[str] = None\n    chat_id: Optional[UUID] = None\n    feedback_type: Optional[FeedbackType] = None\n    user_id: Optional[UUID] = None\n\n\nclass FeedbackOrigin(BaseModel):\n    origin: str\n    feedbacks: int\n"
  },
  {
    "path": "backend/app/models/knowledge_base.py",
    "content": "import enum\nfrom datetime import datetime\nfrom typing import Dict, Optional, Union\nfrom uuid import UUID\n\nfrom pydantic import BaseModel\nfrom sqlalchemy import JSON, func\nfrom sqlalchemy.dialects.mysql import MEDIUMTEXT\nfrom sqlmodel import (\n    Field,\n    Column,\n    DateTime,\n    Relationship as SQLRelationship,\n    SQLModel,\n)\nfrom llama_index.core.node_parser.text.sentence import (\n    DEFAULT_PARAGRAPH_SEP,\n    SENTENCE_CHUNK_OVERLAP,\n)\nfrom app.rag.node_parser.file.markdown import (\n    DEFAULT_CHUNK_HEADER_LEVEL,\n    DEFAULT_CHUNK_SIZE,\n)\nfrom app.api.admin_routes.models import KnowledgeBaseDescriptor\nfrom app.exceptions import KBDataSourceNotFound\nfrom app.models.auth import User\nfrom app.models.data_source import DataSource\nfrom app.models.embed_model import EmbeddingModel\nfrom app.models.llm import LLM\nfrom app.types import MimeTypes\n\n# For compatibility with old code, define a fake knowledge base id.\nPHONY_KNOWLEDGE_BASE_ID = 0\n\n\nclass IndexMethod(str, enum.Enum):\n    KNOWLEDGE_GRAPH = \"knowledge_graph\"\n    VECTOR = \"vector\"\n\n\nclass KnowledgeBaseDataSource(SQLModel, table=True):\n    knowledge_base_id: int = Field(primary_key=True, foreign_key=\"knowledge_bases.id\")\n    data_source_id: int = Field(primary_key=True, foreign_key=\"data_sources.id\")\n\n    __tablename__ = \"knowledge_base_datasources\"\n\n\n# Chunking Settings.\n\n\nclass ChunkSplitter(str, enum.Enum):\n    SENTENCE_SPLITTER = \"SentenceSplitter\"\n    MARKDOWN_NODE_PARSER = \"MarkdownNodeParser\"\n\n\nclass SentenceSplitterOptions(BaseModel):\n    chunk_size: int = Field(\n        description=\"The token chunk size for each chunk.\",\n        default=1000,\n        gt=0,\n    )\n    chunk_overlap: int = Field(\n        description=\"The overlap size for each chunk.\",\n        default=SENTENCE_CHUNK_OVERLAP,\n        gt=0,\n    )\n    paragraph_separator: str = Field(\n        description=\"The paragraph separator for 
 splitting the text.\",\n        default=DEFAULT_PARAGRAPH_SEP,\n    )\n\n\nclass MarkdownNodeParserOptions(BaseModel):\n    chunk_size: int = Field(\n        description=\"The token chunk size for each chunk.\",\n        default=1000,\n        gt=0,\n    )\n    chunk_header_level: int = Field(\n        description=\"The header level to split on\",\n        default=DEFAULT_CHUNK_HEADER_LEVEL,\n        ge=1,\n        le=6,\n    )\n\n\nclass ChunkSplitterConfig(BaseModel):\n    splitter: ChunkSplitter = Field(default=ChunkSplitter.SENTENCE_SPLITTER)\n    splitter_options: Union[SentenceSplitterOptions, MarkdownNodeParserOptions] = (\n        Field()\n    )\n\n\nclass ChunkingMode(str, enum.Enum):\n    GENERAL = \"general\"\n    ADVANCED = \"advanced\"\n\n\nclass BaseChunkingConfig(BaseModel):\n    mode: ChunkingMode = Field(default=ChunkingMode.GENERAL)\n\n\nclass GeneralChunkingConfig(BaseChunkingConfig):\n    mode: ChunkingMode = Field(default=ChunkingMode.GENERAL)\n    chunk_size: int = Field(default=DEFAULT_CHUNK_SIZE, gt=0)\n    chunk_overlap: int = Field(default=SENTENCE_CHUNK_OVERLAP, gt=0)\n    paragraph_separator: str = Field(default=DEFAULT_PARAGRAPH_SEP)\n\n\nclass AdvancedChunkingConfig(BaseChunkingConfig):\n    mode: ChunkingMode = Field(default=ChunkingMode.ADVANCED)\n    rules: Dict[MimeTypes, ChunkSplitterConfig] = Field(default_factory=dict)\n\n\nChunkingConfig = Union[GeneralChunkingConfig, AdvancedChunkingConfig]\n\n# Knowledge Base Model\n\n\nclass KnowledgeBase(SQLModel, table=True):\n    __tablename__ = \"knowledge_bases\"\n\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=255, nullable=False)\n    description: Optional[str] = Field(sa_column=Column(MEDIUMTEXT), default=None)\n\n    # The config for chunking, the process to break down the document into smaller chunks.\n    chunking_config: Dict = Field(\n        sa_column=Column(JSON), default=GeneralChunkingConfig().model_dump()\n    )\n\n    # 
Data sources config.\n    data_sources: list[\"DataSource\"] = SQLRelationship(\n        link_model=KnowledgeBaseDataSource\n    )\n\n    # Index Config.\n    index_methods: list[IndexMethod] = Field(\n        default=[IndexMethod.VECTOR], sa_column=Column(JSON)\n    )\n    llm_id: int = Field(foreign_key=\"llms.id\", nullable=True)\n    llm: \"LLM\" = SQLRelationship(\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"foreign_keys\": \"KnowledgeBase.llm_id\",\n        },\n    )\n    embedding_model_id: int = Field(foreign_key=\"embedding_models.id\", nullable=True)\n    embedding_model: \"EmbeddingModel\" = SQLRelationship(\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"foreign_keys\": \"KnowledgeBase.embedding_model_id\",\n        },\n    )\n    documents_total: int = Field(default=0)\n    data_sources_total: int = Field(default=0)\n\n    # TODO: Support knowledge-base level permission control.\n\n    created_by: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    creator: \"User\" = SQLRelationship(\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"KnowledgeBase.created_by == User.id\",\n        },\n    )\n    created_at: Optional[datetime] = Field(\n        default=None, sa_column=Column(DateTime(), server_default=func.now())\n    )\n    updated_by: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    updated_at: Optional[datetime] = Field(\n        default=None,\n        sa_column=Column(DateTime(), server_default=func.now(), onupdate=func.now()),\n    )\n    deleted_by: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    deleted_at: Optional[datetime] = Field(default=None, sa_column=Column(DateTime()))\n\n    def __init__(self, **kwargs):\n        kwargs.setdefault(\"data_sources\", [])\n        super().__init__(**kwargs)\n\n    def __hash__(self):\n        return hash(self.id)\n\n    def get_data_source_by_id(self, 
data_source_id: int) -> Optional[DataSource]:\n        return next(\n            (\n                ds\n                for ds in self.data_sources\n                if ds.id == data_source_id and not ds.deleted_at\n            ),\n            None,\n        )\n\n    def must_get_data_source_by_id(self, data_source_id: int) -> DataSource:\n        data_source = self.get_data_source_by_id(data_source_id)\n        if data_source is None:\n            raise KBDataSourceNotFound(self.id, data_source_id)\n        return data_source\n\n    def to_descriptor(self) -> KnowledgeBaseDescriptor:\n        return KnowledgeBaseDescriptor(\n            id=self.id,\n            name=self.name,\n        )\n"
  },
  {
    "path": "backend/app/models/knowledge_base_scoped/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/models/knowledge_base_scoped/table_naming.py",
    "content": "import logging\nimport re\n\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.embed_model import DEFAULT_VECTOR_DIMENSION\n\n\nlogger = logging.getLogger(__name__)\n\n\nDEFAULT_CHUNKS_TABLE_NAME = \"chunks\"\nDEFAULT_ENTITIES_TABLE_NAME = \"entities\"\nDEFAULT_RELATIONSHIPS_TABLE_NAME = \"relationships\"\nCHUNKS_TABLE_PREFIX = \"chunks_\"\nENTITIES_TABLE_PREFIX = \"entities_\"\nRELATIONSHIPS_TABLE_PREFIX = \"relationships_\"\nKB_CHUNKS_TABLE_PATTERN = re.compile(r\"^chunks_\\d+$\")\nKB_ENTITIES_TABLE_PATTERN = re.compile(r\"^entities_\\d+$\")\nKB_RELATIONSHIPS_TABLE_PATTERN = re.compile(r\"^relationships_\\d+$\")\n\n\ndef get_kb_chunks_table_name(knowledge_base: KnowledgeBase) -> str:\n    return (\n        CHUNKS_TABLE_PREFIX + str(knowledge_base.id)\n        if knowledge_base\n        else DEFAULT_CHUNKS_TABLE_NAME\n    )\n\n\ndef get_kb_relationships_table_name(knowledge_base: KnowledgeBase) -> str:\n    return (\n        RELATIONSHIPS_TABLE_PREFIX + str(knowledge_base.id)\n        if knowledge_base\n        else DEFAULT_RELATIONSHIPS_TABLE_NAME\n    )\n\n\ndef get_kb_entities_table_name(knowledge_base: KnowledgeBase) -> str:\n    return (\n        ENTITIES_TABLE_PREFIX + str(knowledge_base.id)\n        if knowledge_base\n        else DEFAULT_ENTITIES_TABLE_NAME\n    )\n\n\ndef get_kb_vector_dims(kb: KnowledgeBase):\n    vector_dimension = DEFAULT_VECTOR_DIMENSION\n    if kb.embedding_model and kb.embedding_model.vector_dimension:\n        vector_dimension = kb.embedding_model.vector_dimension\n    else:\n        logger.warning(\n            \"This knowledge base doesn't have a configured embedding model, or the \"\n            \"vector dimension of the embedding model is missing.\"\n        )\n    return vector_dimension\n"
  },
  {
    "path": "backend/app/models/llm.py",
    "content": "from typing import Optional, Any\nfrom sqlmodel import Field, Column, JSON, String\nfrom pydantic import BaseModel\nfrom app.rag.llms.provider import LLMProvider\nfrom .base import UpdatableBaseModel, AESEncryptedColumn\n\n\nclass BaseLLM(UpdatableBaseModel):\n    name: str = Field(max_length=64)\n    provider: LLMProvider = Field(sa_column=Column(String(32), nullable=False))\n    model: str = Field(max_length=256)\n    config: dict | list | None = Field(sa_column=Column(JSON), default={})\n    is_default: bool = Field(default=False)\n\n\nclass LLM(BaseLLM, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    credentials: Any = Field(sa_column=Column(AESEncryptedColumn, nullable=True))\n\n    __tablename__ = \"llms\"\n\n\nclass AdminLLM(BaseLLM):\n    id: int\n\n\nclass LLMUpdate(BaseModel):\n    name: Optional[str] = None\n    config: Optional[dict] = None\n    credentials: Optional[str | dict] = None\n"
  },
  {
    "path": "backend/app/models/recommend_question.py",
    "content": "from typing import Optional, List\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    JSON,\n    Relationship as SQLRelationship,\n)\n\nfrom .base import UpdatableBaseModel\n\n\nclass RecommendQuestion(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    questions: List = Field(default=[], sa_column=Column(JSON))\n    chat_message_id: int = Field(foreign_key=\"chat_messages.id\", index=True)\n    chat_message: \"ChatMessage\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"RecommendQuestion.chat_message_id == ChatMessage.id\",\n        },\n    )\n\n    __tablename__ = \"recommend_questions\"\n"
  },
  {
    "path": "backend/app/models/relationship.py",
    "content": "from datetime import datetime\nfrom app.utils.singleflight_cache import singleflight_cache\nfrom typing import Optional, Type\nfrom uuid import UUID\n\nfrom pydantic import BaseModel\nfrom sqlalchemy import Column, Text, JSON, DateTime\nfrom sqlmodel import (\n    SQLModel,\n    Field,\n    Relationship as SQLRelationship,\n)\nfrom tidb_vector.sqlalchemy import VectorType\nfrom app.models.entity import get_kb_entity_model\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.knowledge_base_scoped.table_naming import get_kb_vector_dims\nfrom app.utils.namespace import format_namespace\nfrom app.logger import logger\n\n\nclass RelationshipPublic(BaseModel):\n    id: int\n    description: str\n    source_entity_id: int\n    target_entity_id: int\n    meta: dict = Field(default_factory=dict)\n    weight: Optional[int] = Field(default=0)\n    last_modified_at: Optional[datetime] = Field(default=None)\n    document_id: Optional[int] = Field(default=None)\n    chunk_id: Optional[UUID] = Field(default=None)\n\n\ndef get_kb_relationship_model(kb: KnowledgeBase) -> Type[SQLModel]:\n    vector_dimension = get_kb_vector_dims(kb)\n    entity_model = get_kb_entity_model(kb)\n    return get_dynamic_relationship_model(vector_dimension, str(kb.id), entity_model)\n\n\n@singleflight_cache\ndef get_dynamic_relationship_model(\n    vector_dimension: int,\n    namespace: Optional[str] = None,\n    entity_model: Optional[Type[SQLModel]] = None,\n) -> Type[SQLModel]:\n    namespace = format_namespace(namespace)\n    entity_table_name = entity_model.__tablename__\n    entity_model_name = entity_model.__name__\n    relationship_table_name = f\"relationships_{namespace}\"\n    relationship_model_name = f\"Relationship_{namespace}_{vector_dimension}\"\n\n    logger.info(\n        \"Dynamic create relationship model (dimension: %s, table: %s, model: %s)\",\n        vector_dimension,\n        relationship_table_name,\n        relationship_model_name,\n    )\n\n   
 class Relationship(SQLModel):\n        id: Optional[int] = Field(default=None, primary_key=True)\n        description: str = Field(sa_column=Column(Text))\n        meta: dict = Field(default_factory=dict, sa_column=Column(JSON))\n        weight: int = 0\n        source_entity_id: int = Field(foreign_key=f\"{entity_table_name}.id\")\n        target_entity_id: int = Field(foreign_key=f\"{entity_table_name}.id\")\n        last_modified_at: Optional[datetime] = Field(sa_column=Column(DateTime))\n        document_id: Optional[int] = Field(default=None, nullable=True)\n        chunk_id: Optional[UUID] = Field(default=None, nullable=True)\n        description_vec: list[float] = Field(sa_type=VectorType(vector_dimension))\n\n        def __hash__(self):\n            return hash(self.id)\n\n        def screenshot(self):\n            obj_dict = self.model_dump(\n                exclude={\n                    \"description_vec\",\n                    \"source_entity\",\n                    \"target_entity\",\n                    \"last_modified_at\",\n                }\n            )\n            return obj_dict\n\n    relationship_model = type(\n        relationship_model_name,\n        (Relationship,),\n        {\n            \"__tablename__\": relationship_table_name,\n            \"__table_args__\": {\"extend_existing\": True},\n            \"__annotations__\": {\n                \"source_entity\": entity_model,\n                \"target_entity\": entity_model,\n            },\n            \"source_entity\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"primaryjoin\": f\"{relationship_model_name}.source_entity_id == {entity_model_name}.id\",\n                    \"lazy\": \"joined\",\n                },\n            ),\n            \"target_entity\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"primaryjoin\": f\"{relationship_model_name}.target_entity_id == {entity_model_name}.id\",\n                 
   \"lazy\": \"joined\",\n                },\n            ),\n        },\n        table=True,\n    )\n\n    return relationship_model\n"
  },
  {
    "path": "backend/app/models/reranker_model.py",
    "content": "from typing import Optional, Any\n\nfrom sqlmodel import Field, Column, JSON, String\n\nfrom .base import UpdatableBaseModel, AESEncryptedColumn\nfrom app.rag.rerankers.provider import RerankerProvider\nfrom pydantic import BaseModel\n\n\nclass BaseRerankerModel(UpdatableBaseModel):\n    name: str = Field(max_length=64)\n    provider: RerankerProvider = Field(sa_column=Column(String(32), nullable=False))\n    model: str = Field(max_length=256)\n    top_n: int = Field(default=10)\n    config: dict | list | None = Field(sa_column=Column(JSON), default={})\n    is_default: bool = Field(default=False)\n\n\nclass RerankerModel(BaseRerankerModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    credentials: Any = Field(sa_column=Column(AESEncryptedColumn, nullable=True))\n\n    __tablename__ = \"reranker_models\"\n\n\nclass AdminRerankerModel(BaseRerankerModel):\n    id: int\n\n\nclass RerankerModelUpdate(BaseModel):\n    name: Optional[str] = None\n    config: Optional[dict | list] = None\n    credentials: Optional[str | dict] = None\n    top_n: Optional[int] = None\n"
  },
  {
    "path": "backend/app/models/semantic_cache.py",
    "content": "from typing import Optional, Any\nfrom datetime import datetime\n\nfrom sqlmodel import (\n    SQLModel,\n    Field,\n    Column,\n    JSON,\n    Text,\n    func,\n    DateTime,\n)\nfrom tidb_vector.sqlalchemy import VectorType\n\nfrom app.core.config import settings\n\n\nclass SemanticCache(SQLModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    query: str = Field(sa_column=Column(Text))\n    query_vec: Any = Field(\n        sa_column=Column(\n            VectorType(settings.EMBEDDING_DIMS), comment=\"hnsw(distance=cosine)\"\n        )\n    )\n    value: str = Field(sa_column=Column(Text))\n    value_vec: Any = Field(\n        sa_column=Column(\n            VectorType(settings.EMBEDDING_DIMS), comment=\"hnsw(distance=cosine)\"\n        )\n    )\n    meta: dict = Field(default_factory=dict, sa_column=Column(JSON))\n    created_at: datetime = Field(\n        sa_column=Column(DateTime, server_default=func.now(), nullable=True)\n    )\n    updated_at: datetime = Field(\n        sa_column=Column(\n            DateTime, server_default=func.now(), onupdate=func.now(), nullable=True\n        )\n    )\n\n    __tablename__ = \"semantic_cache\"\n    __table_args__ = {\n        # Ref: https://docs.pingcap.com/tidb/stable/time-to-live\n        \"mysql_TTL\": \"created_at + INTERVAL 1 MONTH;\",\n    }\n\n    def __hash__(self):\n        return hash(self.id)\n\n    # screenshot method is used to return a dictionary representation of the object\n    # that can be used for recording or debugging purposes\n    def screenshot(self):\n        return self.model_dump(exclude={\"query_vec\", \"value_vec\"})\n"
  },
  {
    "path": "backend/app/models/site_setting.py",
    "content": "from typing import Optional\nfrom datetime import datetime\n\nfrom sqlmodel import SQLModel, Field, Column, JSON, func\nfrom sqlalchemy.dialects.mysql import DATETIME\n\n\nclass SiteSetting(SQLModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=256, unique=True)\n    data_type: str = Field(max_length=256)\n    value: str = Field(sa_column=Column(JSON))\n    created_at: Optional[datetime] = Field(\n        default=None,\n        sa_column=Column(DATETIME(timezone=True, fsp=6), server_default=func.now()),\n    )\n    updated_at: Optional[datetime] = Field(\n        default=None,\n        sa_column=Column(\n            # SiteSetting needs more time precision to avoid timestamp collision between each other,\n            # so we use mysql.DATETIME rather than base.UpdatableBaseModel\n            DATETIME(timezone=True, fsp=6),\n            server_default=func.now(),\n            onupdate=func.now(),\n        ),\n    )\n\n    __tablename__ = \"site_settings\"\n"
  },
  {
    "path": "backend/app/models/staff_action_log.py",
    "content": "from typing import Optional, Dict\nfrom datetime import datetime\n\nfrom sqlmodel import SQLModel, Field, Column, JSON, DateTime, func\n\n\nclass StaffActionLog(SQLModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    action: str\n    action_time: datetime = Field(sa_column=Column(DateTime, server_default=func.now()))\n    target_type: str\n    target_id: int\n    before: Dict = Field(default_factory=dict, sa_column=Column(JSON))\n    after: Dict = Field(default_factory=dict, sa_column=Column(JSON))\n\n    __tablename__ = \"staff_action_logs\"\n"
  },
  {
    "path": "backend/app/models/upload.py",
    "content": "from uuid import UUID\nfrom typing import Optional\n\nfrom sqlmodel import (\n    Field,\n    Column,\n    String,\n    Relationship as SQLRelationship,\n)\n\nfrom app.models.base import UpdatableBaseModel\nfrom app.types import MimeTypes\n\n\nclass Upload(UpdatableBaseModel, table=True):\n    id: Optional[int] = Field(default=None, primary_key=True)\n    name: str = Field(max_length=255)\n    size: int = Field(default=0)\n    path: str = Field(max_length=255)\n    mime_type: MimeTypes = Field(sa_column=Column(String(128), nullable=False))\n    user_id: UUID = Field(foreign_key=\"users.id\", nullable=True)\n    user: \"User\" = SQLRelationship(  # noqa:F821\n        sa_relationship_kwargs={\n            \"lazy\": \"joined\",\n            \"primaryjoin\": \"Upload.user_id == User.id\",\n        },\n    )\n\n    __tablename__ = \"uploads\"\n"
  },
  {
    "path": "backend/app/rag/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/build_index.py",
    "content": "import logging\nfrom typing import List, Optional, Type\n\nfrom llama_index.core import VectorStoreIndex\nfrom llama_index.core.embeddings.utils import EmbedType\nfrom llama_index.core.llms.llm import LLM\nfrom llama_index.core.node_parser import SentenceSplitter\nfrom llama_index.core.schema import TransformComponent\n\nfrom sqlmodel import SQLModel, Session\nfrom app.models.knowledge_base import (\n    ChunkSplitter,\n    ChunkingMode,\n    KnowledgeBase,\n    SentenceSplitterOptions,\n    GeneralChunkingConfig,\n    ChunkSplitterConfig,\n    MarkdownNodeParserOptions,\n    AdvancedChunkingConfig,\n)\nfrom app.rag.knowledge_base.index_store import (\n    get_kb_tidb_vector_store,\n    get_kb_tidb_graph_store,\n)\nfrom app.rag.indices.knowledge_graph import KnowledgeGraphIndex\nfrom app.models import Document\nfrom app.rag.node_parser.file.markdown import MarkdownNodeParser\nfrom app.types import MimeTypes\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\n\nlogger = logging.getLogger(__name__)\n\n\nclass IndexService:\n    \"\"\"\n    Service class for building RAG indexes (vector index and knowledge graph index).\n    \"\"\"\n\n    def __init__(\n        self,\n        llm: LLM,\n        embed_model: Optional[EmbedType] = None,\n        knowledge_base: Optional[KnowledgeBase] = None,\n    ):\n        self._llm = llm\n        self._dspy_lm = get_dspy_lm_by_llama_llm(llm)\n        self._embed_model = embed_model\n        self._knowledge_base = knowledge_base\n\n    # TODO: move to ./indices/vector_search\n    def build_vector_index_for_document(\n        self, session: Session, db_document: Type[Document]\n    ):\n        \"\"\"\n        Build vector index and graph index from document.\n\n        Build vector index will do the following:\n        1. Parse document into nodes.\n        2. Extract metadata from nodes by applying transformations.\n        3. embedding text nodes.\n        4. 
Insert nodes into `chunks` table.\n        \"\"\"\n        vector_store = get_kb_tidb_vector_store(session, self._knowledge_base)\n        transformations = self._get_transformations(db_document)\n        vector_index = VectorStoreIndex.from_vector_store(\n            vector_store,\n            embed_model=self._embed_model,\n            transformations=transformations,\n        )\n\n        llama_document = db_document.to_llama_document()\n        logger.info(f\"Start building vector index for document #{db_document.id}.\")\n        vector_index.insert(llama_document, source_uri=db_document.source_uri)\n        logger.info(f\"Finish building vector index for document #{db_document.id}.\")\n        vector_store.close_session()\n\n        return\n\n    def _get_transformations(\n        self, db_document: Type[Document]\n    ) -> List[TransformComponent]:\n        transformations = []\n\n        chunking_config_dict = self._knowledge_base.chunking_config\n        mode = (\n            chunking_config_dict[\"mode\"]\n            if \"mode\" in chunking_config_dict\n            else ChunkingMode.GENERAL\n        )\n\n        if mode == ChunkingMode.ADVANCED:\n            chunking_config = AdvancedChunkingConfig.model_validate(\n                chunking_config_dict\n            )\n            rules = chunking_config.rules\n        else:\n            chunking_config = GeneralChunkingConfig.model_validate(chunking_config_dict)\n            rules = {\n                MimeTypes.PLAIN_TXT: ChunkSplitterConfig(\n                    splitter=ChunkSplitter.SENTENCE_SPLITTER,\n                    splitter_options=SentenceSplitterOptions(\n                        chunk_size=chunking_config.chunk_size,\n                        chunk_overlap=chunking_config.chunk_overlap,\n                        paragraph_separator=chunking_config.paragraph_separator,\n                    ),\n                ),\n                MimeTypes.MARKDOWN: ChunkSplitterConfig(\n                    
splitter=ChunkSplitter.MARKDOWN_NODE_PARSER,\n                    splitter_options=MarkdownNodeParserOptions(\n                        chunk_size=chunking_config.chunk_size,\n                    ),\n                ),\n            }\n\n        # Chunking\n        mime_type = db_document.mime_type\n        if mime_type not in rules:\n            raise RuntimeError(\n                f\"Can not chunking for the document in {db_document.mime_type} format\"\n            )\n\n        rule = rules[mime_type]\n        match rule.splitter:\n            case ChunkSplitter.MARKDOWN_NODE_PARSER:\n                options = MarkdownNodeParserOptions.model_validate(\n                    rule.splitter_options\n                )\n                transformations.append(MarkdownNodeParser(**options.model_dump()))\n            case ChunkSplitter.SENTENCE_SPLITTER:\n                options = SentenceSplitterOptions.model_validate(rule.splitter_options)\n                transformations.append(SentenceSplitter(**options.model_dump()))\n            case _:\n                raise ValueError(f\"Unsupported chunking splitter type: {rule.splitter}\")\n\n        return transformations\n\n    # TODO: move to ./indices/knowledge_graph\n    def build_kg_index_for_chunk(self, session: Session, db_chunk: Type[SQLModel]):\n        \"\"\"Build knowledge graph index from chunk.\n\n        Build knowledge graph index will do the following:\n        1. load TextNode from `chunks` table.\n        2. extract entities and relations from TextNode.\n        3. 
insert entities and relations into `entities` and `relations` table.\n        \"\"\"\n\n        graph_store = get_kb_tidb_graph_store(session, self._knowledge_base)\n        graph_index: KnowledgeGraphIndex = KnowledgeGraphIndex.from_existing(\n            dspy_lm=self._dspy_lm,\n            kg_store=graph_store,\n        )\n\n        node = db_chunk.to_llama_text_node()\n        logger.info(f\"Start building knowledge graph index for chunk #{db_chunk.id}.\")\n        graph_index.insert_nodes([node])\n        logger.info(f\"Finish building knowledge graph index for chunk #{db_chunk.id}.\")\n        graph_store.close_session()\n\n        return\n"
  },
  {
    "path": "backend/app/rag/chat/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/chat/chat_flow.py",
    "content": "import json\nimport logging\nfrom datetime import datetime, UTC\nfrom typing import List, Optional, Generator, Tuple, Any\nfrom urllib.parse import urljoin\nfrom uuid import UUID\n\nimport requests\nfrom langfuse.llama_index import LlamaIndexInstrumentor\nfrom langfuse.llama_index._context import langfuse_instrumentor_context\nfrom llama_index.core import get_response_synthesizer\nfrom llama_index.core.base.llms.types import ChatMessage, MessageRole\nfrom llama_index.core.schema import NodeWithScore\nfrom llama_index.core.prompts.rich import RichPromptTemplate\n\nfrom sqlmodel import Session\nfrom app.core.config import settings\nfrom app.exceptions import ChatNotFound\nfrom app.models import (\n    User,\n    Chat as DBChat,\n    ChatVisibility,\n    ChatMessage as DBChatMessage,\n)\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.rag.chat.retrieve.retrieve_flow import SourceDocument, RetrieveFlow\nfrom app.rag.chat.stream_protocol import (\n    ChatEvent,\n    ChatStreamDataPayload,\n    ChatStreamMessagePayload,\n)\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\nfrom app.rag.retrievers.knowledge_graph.schema import KnowledgeGraphRetrievalResult\nfrom app.rag.types import ChatEventType, ChatMessageSate\nfrom app.rag.utils import parse_goal_response_format\nfrom app.repositories import chat_repo\nfrom app.site_settings import SiteSetting\nfrom app.utils.tracing import LangfuseContextManager\n\nlogger = logging.getLogger(__name__)\n\n\ndef parse_chat_messages(\n    chat_messages: List[ChatMessage],\n) -> tuple[str, List[ChatMessage]]:\n    user_question = chat_messages[-1].content\n    chat_history = chat_messages[:-1]\n    return user_question, chat_history\n\n\nclass ChatFlow:\n    _trace_manager: LangfuseContextManager\n\n    def __init__(\n        self,\n        *,\n        db_session: Session,\n        user: User,\n        browser_id: str,\n        origin: str,\n        chat_messages: List[ChatMessage],\n        engine_name: 
str = \"default\",\n        chat_id: Optional[UUID] = None,\n    ) -> None:\n        self.chat_id = chat_id\n        self.db_session = db_session\n        self.user = user\n        self.browser_id = browser_id\n        self.engine_name = engine_name\n\n        # Load chat engine and chat session.\n        self.user_question, self.chat_history = parse_chat_messages(chat_messages)\n        if chat_id:\n            # FIXME:\n            #   only chat owner or superuser can access the chat,\n            #   anonymous user can only access anonymous chat by track_id\n            self.db_chat_obj = chat_repo.get(self.db_session, chat_id)\n            if not self.db_chat_obj:\n                raise ChatNotFound(chat_id)\n            try:\n                self.engine_config = ChatEngineConfig.load_from_db(\n                    db_session, self.db_chat_obj.engine.name\n                )\n                self.db_chat_engine = self.engine_config.get_db_chat_engine()\n            except Exception as e:\n                logger.error(f\"Failed to load chat engine config: {e}\")\n                self.engine_config = ChatEngineConfig.load_from_db(\n                    db_session, engine_name\n                )\n                self.db_chat_engine = self.engine_config.get_db_chat_engine()\n            logger.info(\n                f\"Init ChatFlow for chat {chat_id} (chat_engine: {self.db_chat_obj.engine.name})\"\n            )\n            self.chat_history = [\n                ChatMessage(role=m.role, content=m.content, additional_kwargs={})\n                for m in chat_repo.get_messages(self.db_session, self.db_chat_obj)\n            ]\n        else:\n            self.engine_config = ChatEngineConfig.load_from_db(db_session, engine_name)\n            self.db_chat_engine = self.engine_config.get_db_chat_engine()\n            self.db_chat_obj = chat_repo.create(\n                self.db_session,\n                DBChat(\n                    # TODO: title should be generated by 
the LLM\n                    title=self.user_question[:100],\n                    engine_id=self.db_chat_engine.id,\n                    engine_options=self.engine_config.screenshot(),\n                    user_id=self.user.id if self.user else None,\n                    browser_id=self.browser_id,\n                    origin=origin,\n                    visibility=(\n                        ChatVisibility.PUBLIC\n                        if not self.user\n                        else ChatVisibility.PRIVATE\n                    ),\n                ),\n            )\n            chat_id = self.db_chat_obj.id\n\n            # Notice: slack/discord bots may create a new chat with history messages.\n            now = datetime.now(UTC)\n            for i, m in enumerate(self.chat_history):\n                chat_repo.create_message(\n                    session=self.db_session,\n                    chat=self.db_chat_obj,\n                    chat_message=DBChatMessage(\n                        role=m.role,\n                        content=m.content,\n                        ordinal=i + 1,\n                        created_at=now,\n                        updated_at=now,\n                        finished_at=now,\n                    ),\n                )\n\n        # Init Langfuse for tracing.\n        enable_langfuse = (\n            SiteSetting.langfuse_secret_key and SiteSetting.langfuse_public_key\n        )\n        instrumentor = LlamaIndexInstrumentor(\n            host=SiteSetting.langfuse_host,\n            secret_key=SiteSetting.langfuse_secret_key,\n            public_key=SiteSetting.langfuse_public_key,\n            enabled=enable_langfuse,\n        )\n        self._trace_manager = LangfuseContextManager(instrumentor)\n\n        # Init LLM.\n        self._llm = self.engine_config.get_llama_llm(self.db_session)\n        self._fast_llm = self.engine_config.get_fast_llama_llm(self.db_session)\n        self._fast_dspy_lm = 
get_dspy_lm_by_llama_llm(self._fast_llm)\n\n        # Load knowledge bases.\n        self.knowledge_bases = self.engine_config.get_knowledge_bases(self.db_session)\n        self.knowledge_base_ids = [kb.id for kb in self.knowledge_bases]\n\n        # Init retrieve flow.\n        self.retrieve_flow = RetrieveFlow(\n            db_session=self.db_session,\n            engine_name=self.engine_name,\n            engine_config=self.engine_config,\n            llm=self._llm,\n            fast_llm=self._fast_llm,\n            knowledge_bases=self.knowledge_bases,\n        )\n\n    def chat(self) -> Generator[ChatEvent | str, None, None]:\n        try:\n            with self._trace_manager.observe(\n                trace_name=\"ChatFlow\",\n                user_id=(\n                    self.user.email if self.user else f\"anonymous-{self.browser_id}\"\n                ),\n                metadata={\n                    \"is_external_engine\": self.engine_config.is_external_engine,\n                    \"chat_engine_config\": self.engine_config.screenshot(),\n                },\n                tags=[f\"chat_engine:{self.engine_name}\"],\n                release=settings.ENVIRONMENT,\n            ) as trace:\n                trace.update(\n                    input={\n                        \"user_question\": self.user_question,\n                        \"chat_history\": self.chat_history,\n                    }\n                )\n\n                if self.engine_config.is_external_engine:\n                    yield from self._external_chat()\n                else:\n                    response_text, source_documents = yield from self._builtin_chat()\n                    trace.update(output=response_text)\n        except Exception as e:\n            logger.exception(e)\n            yield ChatEvent(\n                event_type=ChatEventType.ERROR_PART,\n                payload=\"Encountered an error while processing the chat. 
Please try again later.\",\n            )\n\n    def _builtin_chat(\n        self,\n    ) -> Generator[ChatEvent | str, None, Tuple[Optional[str], List[Any]]]:\n        ctx = langfuse_instrumentor_context.get().copy()\n        db_user_message, db_assistant_message = yield from self._chat_start()\n        langfuse_instrumentor_context.get().update(ctx)\n\n        # 1. Retrieve Knowledge graph related to the user question.\n        (\n            knowledge_graph,\n            knowledge_graph_context,\n        ) = yield from self._search_knowledge_graph(user_question=self.user_question)\n\n        # 2. Refine the user question using knowledge graph and chat history.\n        refined_question = yield from self._refine_user_question(\n            user_question=self.user_question,\n            chat_history=self.chat_history,\n            knowledge_graph_context=knowledge_graph_context,\n            refined_question_prompt=self.engine_config.llm.condense_question_prompt,\n        )\n\n        # 3. Check if the question provided enough context information or need to clarify.\n        if self.engine_config.clarify_question:\n            need_clarify, need_clarify_response = yield from self._clarify_question(\n                user_question=refined_question,\n                chat_history=self.chat_history,\n                knowledge_graph_context=knowledge_graph_context,\n            )\n            if need_clarify:\n                yield from self._chat_finish(\n                    db_assistant_message=db_assistant_message,\n                    db_user_message=db_user_message,\n                    response_text=need_clarify_response,\n                    knowledge_graph=knowledge_graph,\n                    source_documents=[],\n                )\n                return None, []\n\n        # 4. 
Use refined question to search for relevant chunks.\n        relevant_chunks = yield from self._search_relevance_chunks(\n            user_question=refined_question\n        )\n\n        # 5. Generate a response using the refined question and related chunks\n        response_text, source_documents = yield from self._generate_answer(\n            user_question=refined_question,\n            knowledge_graph_context=knowledge_graph_context,\n            relevant_chunks=relevant_chunks,\n        )\n\n        yield from self._chat_finish(\n            db_assistant_message=db_assistant_message,\n            db_user_message=db_user_message,\n            response_text=response_text,\n            knowledge_graph=knowledge_graph,\n            source_documents=source_documents,\n        )\n\n        return response_text, source_documents\n\n    def _chat_start(\n        self,\n    ) -> Generator[ChatEvent, None, Tuple[DBChatMessage, DBChatMessage]]:\n        db_user_message = chat_repo.create_message(\n            session=self.db_session,\n            chat=self.db_chat_obj,\n            chat_message=DBChatMessage(\n                role=MessageRole.USER.value,\n                trace_url=self._trace_manager.trace_url,\n                content=self.user_question.strip(),\n            ),\n        )\n        db_assistant_message = chat_repo.create_message(\n            session=self.db_session,\n            chat=self.db_chat_obj,\n            chat_message=DBChatMessage(\n                role=MessageRole.ASSISTANT.value,\n                trace_url=self._trace_manager.trace_url,\n                content=\"\",\n            ),\n        )\n        yield ChatEvent(\n            event_type=ChatEventType.DATA_PART,\n            payload=ChatStreamDataPayload(\n                chat=self.db_chat_obj,\n                user_message=db_user_message,\n                assistant_message=db_assistant_message,\n            ),\n        )\n        return db_user_message, db_assistant_message\n\n    def 
_search_knowledge_graph(\n        self,\n        user_question: str,\n        annotation_silent: bool = False,\n    ) -> Generator[ChatEvent, None, Tuple[KnowledgeGraphRetrievalResult, str]]:\n        kg_config = self.engine_config.knowledge_graph\n        if kg_config is None or kg_config.enabled is False:\n            return KnowledgeGraphRetrievalResult(), \"\"\n\n        with self._trace_manager.span(\n            name=\"search_knowledge_graph\", input=user_question\n        ) as span:\n            if not annotation_silent:\n                if kg_config.using_intent_search:\n                    yield ChatEvent(\n                        event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                        payload=ChatStreamMessagePayload(\n                            state=ChatMessageSate.KG_RETRIEVAL,\n                            display=\"Identifying The Question's Intents and Perform Knowledge Graph Search\",\n                        ),\n                    )\n                else:\n                    yield ChatEvent(\n                        event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                        payload=ChatStreamMessagePayload(\n                            state=ChatMessageSate.KG_RETRIEVAL,\n                            display=\"Searching the Knowledge Graph for Relevant Context\",\n                        ),\n                    )\n\n            knowledge_graph, knowledge_graph_context = (\n                self.retrieve_flow.search_knowledge_graph(user_question)\n            )\n\n            span.end(\n                output={\n                    \"knowledge_graph\": knowledge_graph,\n                    \"knowledge_graph_context\": knowledge_graph_context,\n                }\n            )\n\n        return knowledge_graph, knowledge_graph_context\n\n    def _refine_user_question(\n        self,\n        user_question: str,\n        chat_history: Optional[List[ChatMessage]] = [],\n        refined_question_prompt: 
Optional[str] = None,\n        knowledge_graph_context: str = \"\",\n        annotation_silent: bool = False,\n    ) -> Generator[ChatEvent, None, str]:\n        with self._trace_manager.span(\n            name=\"refine_user_question\",\n            input={\n                \"user_question\": user_question,\n                \"chat_history\": chat_history,\n                \"knowledge_graph_context\": knowledge_graph_context,\n            },\n        ) as span:\n            if not annotation_silent:\n                yield ChatEvent(\n                    event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                    payload=ChatStreamMessagePayload(\n                        state=ChatMessageSate.REFINE_QUESTION,\n                        display=\"Query Rewriting for Enhanced Information Retrieval\",\n                    ),\n                )\n\n            prompt_template = RichPromptTemplate(refined_question_prompt)\n            refined_question = self._fast_llm.predict(\n                prompt_template,\n                graph_knowledges=knowledge_graph_context,\n                chat_history=chat_history,\n                question=user_question,\n                current_date=datetime.now().strftime(\"%Y-%m-%d\"),\n            )\n\n            if not annotation_silent:\n                yield ChatEvent(\n                    event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                    payload=ChatStreamMessagePayload(\n                        state=ChatMessageSate.REFINE_QUESTION,\n                        message=refined_question,\n                    ),\n                )\n\n            span.end(output=refined_question)\n\n            return refined_question\n\n    def _clarify_question(\n        self,\n        user_question: str,\n        chat_history: Optional[List[ChatMessage]] = [],\n        knowledge_graph_context: str = \"\",\n    ) -> Generator[ChatEvent, None, Tuple[bool, str]]:\n        \"\"\"\n        Check if the question clear and 
provided enough context information, otherwise, it is necessary to\n        stop the conversation early and ask the user for the further clarification.\n\n        Args:\n            user_question: str\n            knowledge_graph_context: str\n\n        Returns:\n            bool: Determine whether further clarification of the issue is needed from the user.\n            str: The content of the questions that require clarification from the user.\n        \"\"\"\n        with self._trace_manager.span(\n            name=\"clarify_question\",\n            input={\n                \"user_question\": user_question,\n                \"knowledge_graph_context\": knowledge_graph_context,\n            },\n        ) as span:\n            prompt_template = RichPromptTemplate(\n                self.engine_config.llm.clarifying_question_prompt\n            )\n\n            prediction = self._fast_llm.predict(\n                prompt_template,\n                graph_knowledges=knowledge_graph_context,\n                chat_history=chat_history,\n                question=user_question,\n            )\n            # TODO: using structured output to get the clarity result.\n            clarity_result = prediction.strip().strip(\".\\\"'!\")\n            need_clarify = clarity_result.lower() != \"false\"\n            need_clarify_response = clarity_result if need_clarify else \"\"\n\n            if need_clarify:\n                yield ChatEvent(\n                    event_type=ChatEventType.TEXT_PART,\n                    payload=need_clarify_response,\n                )\n\n            span.end(\n                output={\n                    \"need_clarify\": need_clarify,\n                    \"need_clarify_response\": need_clarify_response,\n                }\n            )\n\n            return need_clarify, need_clarify_response\n\n    def _search_relevance_chunks(\n        self, user_question: str\n    ) -> Generator[ChatEvent, None, List[NodeWithScore]]:\n        with 
self._trace_manager.span(\n            name=\"search_relevance_chunks\", input=user_question\n        ) as span:\n            yield ChatEvent(\n                event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                payload=ChatStreamMessagePayload(\n                    state=ChatMessageSate.SEARCH_RELATED_DOCUMENTS,\n                    display=\"Retrieving the Most Relevant Documents\",\n                ),\n            )\n\n            relevance_chunks = self.retrieve_flow.search_relevant_chunks(user_question)\n\n            span.end(\n                output={\n                    \"relevance_chunks\": relevance_chunks,\n                }\n            )\n\n            return relevance_chunks\n\n    def _generate_answer(\n        self,\n        user_question: str,\n        knowledge_graph_context: str,\n        relevant_chunks: List[NodeWithScore],\n    ) -> Generator[ChatEvent, None, Tuple[str, List[SourceDocument]]]:\n        with self._trace_manager.span(\n            name=\"generate_answer\", input=user_question\n        ) as span:\n            # Initialize response synthesizer.\n            text_qa_template = RichPromptTemplate(\n                template_str=self.engine_config.llm.text_qa_prompt\n            )\n            text_qa_template = text_qa_template.partial_format(\n                current_date=datetime.now().strftime(\"%Y-%m-%d\"),\n                graph_knowledges=knowledge_graph_context,\n                original_question=self.user_question,\n            )\n            response_synthesizer = get_response_synthesizer(\n                llm=self._llm, text_qa_template=text_qa_template, streaming=True\n            )\n\n            # Initialize response.\n            response = response_synthesizer.synthesize(\n                query=user_question,\n                nodes=relevant_chunks,\n            )\n            source_documents = self.retrieve_flow.get_source_documents_from_nodes(\n                response.source_nodes\n            )\n   
         yield ChatEvent(\n                event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                payload=ChatStreamMessagePayload(\n                    state=ChatMessageSate.SOURCE_NODES,\n                    context=source_documents,\n                ),\n            )\n\n            # Generate response.\n            yield ChatEvent(\n                event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                payload=ChatStreamMessagePayload(\n                    state=ChatMessageSate.GENERATE_ANSWER,\n                    display=\"Generating a Precise Answer with AI\",\n                ),\n            )\n            response_text = \"\"\n            for word in response.response_gen:\n                response_text += word\n                yield ChatEvent(\n                    event_type=ChatEventType.TEXT_PART,\n                    payload=word,\n                )\n\n            if not response_text:\n                raise Exception(\"Got empty response from LLM\")\n\n            span.end(\n                output=response_text,\n                metadata={\n                    \"source_documents\": source_documents,\n                },\n            )\n\n            return response_text, source_documents\n\n    def _post_verification(\n        self, user_question: str, response_text: str, chat_id: UUID, message_id: int\n    ) -> Optional[str]:\n        # post verification to external service, will return the post verification result url\n        post_verification_url = self.engine_config.post_verification_url\n        post_verification_token = self.engine_config.post_verification_token\n\n        if not post_verification_url:\n            return None\n\n        external_request_id = f\"{chat_id}_{message_id}\"\n        qa_content = f\"User question: {user_question}\\n\\nAnswer:\\n{response_text}\"\n\n        with self._trace_manager.span(\n            name=\"post_verification\",\n            input={\n                \"external_request_id\": 
external_request_id,\n                \"qa_content\": qa_content,\n            },\n        ) as span:\n            try:\n                resp = requests.post(\n                    post_verification_url,\n                    json={\n                        \"external_request_id\": external_request_id,\n                        \"qa_content\": qa_content,\n                    },\n                    headers=(\n                        {\n                            \"Authorization\": f\"Bearer {post_verification_token}\",\n                        }\n                        if post_verification_token\n                        else {}\n                    ),\n                    timeout=10,\n                )\n                resp.raise_for_status()\n                job_id = resp.json()[\"job_id\"]\n                post_verification_link = urljoin(\n                    f\"{post_verification_url}/\", str(job_id)\n                )\n\n                span.end(\n                    output={\n                        \"post_verification_link\": post_verification_link,\n                    }\n                )\n\n                return post_verification_link\n            except Exception as e:\n                logger.exception(\"Failed to post verification: %s\", e.message)\n                return None\n\n    def _chat_finish(\n        self,\n        db_assistant_message: ChatMessage,\n        db_user_message: ChatMessage,\n        response_text: str,\n        knowledge_graph: KnowledgeGraphRetrievalResult = KnowledgeGraphRetrievalResult(),\n        source_documents: Optional[List[SourceDocument]] = [],\n        annotation_silent: bool = False,\n    ):\n        if not annotation_silent:\n            yield ChatEvent(\n                event_type=ChatEventType.MESSAGE_ANNOTATIONS_PART,\n                payload=ChatStreamMessagePayload(\n                    state=ChatMessageSate.FINISHED,\n                ),\n            )\n\n        post_verification_result_url = 
self._post_verification(\n            self.user_question,\n            response_text,\n            self.db_chat_obj.id,\n            db_assistant_message.id,\n        )\n\n        db_assistant_message.sources = [s.model_dump() for s in source_documents]\n        db_assistant_message.graph_data = knowledge_graph.to_stored_graph_dict()\n        db_assistant_message.content = response_text\n        db_assistant_message.post_verification_result_url = post_verification_result_url\n        db_assistant_message.updated_at = datetime.now(UTC)\n        db_assistant_message.finished_at = datetime.now(UTC)\n        self.db_session.add(db_assistant_message)\n\n        db_user_message.graph_data = knowledge_graph.to_stored_graph_dict()\n        db_user_message.updated_at = datetime.now(UTC)\n        db_user_message.finished_at = datetime.now(UTC)\n        self.db_session.add(db_user_message)\n        self.db_session.commit()\n\n        yield ChatEvent(\n            event_type=ChatEventType.DATA_PART,\n            payload=ChatStreamDataPayload(\n                chat=self.db_chat_obj,\n                user_message=db_user_message,\n                assistant_message=db_assistant_message,\n            ),\n        )\n\n    # TODO: Separate _external_chat() method into another ExternalChatFlow class, but at the same time, we need to\n    #  share some common methods through ChatMixin or BaseChatFlow.\n    def _external_chat(self) -> Generator[ChatEvent | str, None, None]:\n        ctx = langfuse_instrumentor_context.get().copy()\n        db_user_message, db_assistant_message = yield from self._chat_start()\n        langfuse_instrumentor_context.get().update(ctx)\n\n        cache_messages = None\n        goal, response_format = self.user_question, {}\n        if settings.ENABLE_QUESTION_CACHE and len(self.chat_history) == 0:\n            try:\n                logger.info(\n                    f\"start to find_best_answer_for_question with question: {self.user_question}\"\n             
   )\n                cache_messages = chat_repo.find_best_answer_for_question(\n                    self.db_session, self.user_question\n                )\n                if cache_messages and len(cache_messages) > 0:\n                    logger.info(\n                        f\"find_best_answer_for_question result {len(cache_messages)} for question {self.user_question}\"\n                    )\n            except Exception as e:\n                logger.error(\n                    f\"Failed to find best answer for question {self.user_question}: {e}\"\n                )\n\n        if not cache_messages or len(cache_messages) == 0:\n            try:\n                # 1. Generate the goal with the user question, knowledge graph and chat history.\n                goal, response_format = yield from self._generate_goal()\n\n                # 2. Check if the goal provided enough context information or need to clarify.\n                if self.engine_config.clarify_question:\n                    (\n                        need_clarify,\n                        need_clarify_response,\n                    ) = yield from self._clarify_question(\n                        user_question=goal, chat_history=self.chat_history\n                    )\n                    if need_clarify:\n                        yield from self._chat_finish(\n                            db_assistant_message=db_assistant_message,\n                            db_user_message=db_user_message,\n                            response_text=need_clarify_response,\n                            annotation_silent=True,\n                        )\n                        return\n            except Exception as e:\n                goal = self.user_question\n                logger.warning(\n                    f\"Failed to generate refined goal, fallback to use user question as goal directly: {e}\",\n                    exc_info=True,\n                    extra={},\n                )\n\n            cache_messages 
= None\n            if settings.ENABLE_QUESTION_CACHE:\n                try:\n                    logger.info(\n                        f\"start to find_recent_assistant_messages_by_goal with goal: {goal}, response_format: {response_format}\"\n                    )\n                    cache_messages = chat_repo.find_recent_assistant_messages_by_goal(\n                        self.db_session,\n                        {\"goal\": goal, \"Lang\": response_format.get(\"Lang\", \"English\")},\n                        90,\n                    )\n                    logger.info(\n                        f\"find_recent_assistant_messages_by_goal result {len(cache_messages)} for goal {goal}\"\n                    )\n                except Exception as e:\n                    logger.error(\n                        f\"Failed to find recent assistant messages by goal: {e}\"\n                    )\n\n        stream_chat_api_url = (\n            self.engine_config.external_engine_config.stream_chat_api_url\n        )\n        if cache_messages and len(cache_messages) > 0:\n            stackvm_response_text = cache_messages[0].content\n            task_id = cache_messages[0].meta.get(\"task_id\")\n            for chunk in stackvm_response_text.split(\". \"):\n                if chunk:\n                    if not chunk.endswith(\".\"):\n                        chunk += \". 
\"\n                    yield ChatEvent(\n                        event_type=ChatEventType.TEXT_PART,\n                        payload=chunk,\n                    )\n        else:\n            logger.debug(\n                f\"Chatting with external chat engine (api_url: {stream_chat_api_url}) to answer for user question: {self.user_question}\"\n            )\n            chat_params = {\n                \"goal\": goal,\n                \"response_format\": response_format,\n                \"namespace_name\": \"Default\",\n            }\n            res = requests.post(stream_chat_api_url, json=chat_params, stream=True)\n\n            # Notice: External type chat engine doesn't support non-streaming mode for now.\n            stackvm_response_text = \"\"\n            task_id = None\n            for line in res.iter_lines():\n                if not line:\n                    continue\n\n                # Append to final response text.\n                chunk = line.decode(\"utf-8\")\n                if chunk.startswith(\"0:\"):\n                    word = json.loads(chunk[2:])\n                    stackvm_response_text += word\n                    yield ChatEvent(\n                        event_type=ChatEventType.TEXT_PART,\n                        payload=word,\n                    )\n                else:\n                    yield line + b\"\\n\"\n\n                try:\n                    if chunk.startswith(\"8:\") and task_id is None:\n                        states = json.loads(chunk[2:])\n                        if len(states) > 0:\n                            # accesss task by http://endpoint/?task_id=$task_id\n                            task_id = states[0].get(\"task_id\")\n                except Exception as e:\n                    logger.error(f\"Failed to get task_id from chunk: {e}\")\n\n        response_text = stackvm_response_text\n        base_url = stream_chat_api_url.replace(\"/api/stream_execute_vm\", \"\")\n        try:\n            
post_verification_result_url = self._post_verification(\n                goal,\n                response_text,\n                self.db_chat_obj.id,\n                db_assistant_message.id,\n            )\n            db_assistant_message.post_verification_result_url = (\n                post_verification_result_url\n            )\n        except Exception:\n            logger.error(\n                \"Specific error occurred during post verification job.\", exc_info=True\n            )\n\n        trace_url = f\"{base_url}?task_id={task_id}\" if task_id else \"\"\n        message_meta = {\n            \"task_id\": task_id,\n            \"goal\": goal,\n            **response_format,\n        }\n\n        db_assistant_message.content = response_text\n        db_assistant_message.trace_url = trace_url\n        db_assistant_message.meta = message_meta\n        db_assistant_message.updated_at = datetime.now(UTC)\n        db_assistant_message.finished_at = datetime.now(UTC)\n        self.db_session.add(db_assistant_message)\n\n        db_user_message.trace_url = trace_url\n        db_user_message.meta = message_meta\n        db_user_message.updated_at = datetime.now(UTC)\n        db_user_message.finished_at = datetime.now(UTC)\n        self.db_session.add(db_user_message)\n        self.db_session.commit()\n\n        yield ChatEvent(\n            event_type=ChatEventType.DATA_PART,\n            payload=ChatStreamDataPayload(\n                chat=self.db_chat_obj,\n                user_message=db_user_message,\n                assistant_message=db_assistant_message,\n            ),\n        )\n\n    def _generate_goal(self) -> Generator[ChatEvent, None, Tuple[str, dict]]:\n        try:\n            refined_question = yield from self._refine_user_question(\n                user_question=self.user_question,\n                chat_history=self.chat_history,\n                refined_question_prompt=self.engine_config.llm.generate_goal_prompt,\n                
annotation_silent=True,\n            )\n\n            goal = refined_question.strip()\n            if goal.startswith(\"Goal: \"):\n                goal = goal[len(\"Goal: \") :].strip()\n        except Exception as e:\n            logger.error(f\"Failed to refine question with related knowledge graph: {e}\")\n            goal = self.user_question\n\n        response_format = {}\n        try:\n            clean_goal, response_format = parse_goal_response_format(goal)\n            logger.info(f\"clean goal: {clean_goal}, response_format: {response_format}\")\n            if clean_goal:\n                goal = clean_goal\n        except Exception as e:\n            logger.error(f\"Failed to parse goal and response format: {e}\")\n\n        return goal, response_format\n"
  },
  {
    "path": "backend/app/rag/chat/chat_service.py",
    "content": "from http import HTTPStatus\nimport logging\n\nfrom typing import Generator, List, Optional\nfrom uuid import UUID\n\nfrom fastapi import HTTPException\nfrom pydantic import BaseModel\nfrom sqlalchemy import text, delete\nfrom sqlmodel import Session, select, func\n\nfrom app.api.routes.models import (\n    RequiredConfigStatus,\n    OptionalConfigStatus,\n    NeedMigrationStatus,\n)\nfrom app.models import (\n    User,\n    ChatVisibility,\n    Chat as DBChat,\n    ChatMessage as DBChatMessage,\n    KnowledgeBase as DBKnowledgeBase,\n    RerankerModel as DBRerankerModel,\n    ChatEngine,\n)\nfrom app.models.recommend_question import RecommendQuestion\nfrom app.rag.chat.retrieve.retrieve_flow import RetrieveFlow, SourceDocument\nfrom app.rag.chat.stream_protocol import ChatEvent\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrievalResult,\n    StoredKnowledgeGraph,\n    RetrievedSubGraph,\n)\nfrom app.rag.knowledge_base.index_store import get_kb_tidb_graph_store\nfrom app.repositories import knowledge_base_repo\n\nfrom app.rag.chat.config import (\n    ChatEngineConfig,\n)\nfrom app.rag.types import (\n    ChatEventType,\n    ChatMessageSate,\n)\nfrom app.repositories import chat_engine_repo\nfrom app.repositories.embedding_model import embedding_model_repo\nfrom app.repositories.llm import llm_repo\nfrom app.site_settings import SiteSetting\nfrom llama_index.core.prompts.rich import RichPromptTemplate\n\nlogger = logging.getLogger(__name__)\n\n\nclass ChatResult(BaseModel):\n    chat_id: UUID\n    message_id: int\n    content: str\n    trace: Optional[str] = None\n    sources: Optional[List[SourceDocument]] = []\n\n\ndef get_final_chat_result(\n    generator: Generator[ChatEvent | str, None, None],\n) -> ChatResult:\n    trace, sources, content = None, [], \"\"\n    chat_id, message_id = None, None\n    for m in generator:\n        if not isinstance(m, ChatEvent):\n            continue\n        if m.event_type == 
ChatEventType.MESSAGE_ANNOTATIONS_PART:\n            if m.payload.state == ChatMessageSate.SOURCE_NODES:\n                sources = m.payload.context\n        elif m.event_type == ChatEventType.TEXT_PART:\n            content += m.payload\n        elif m.event_type == ChatEventType.DATA_PART:\n            chat_id = m.payload.chat.id\n            message_id = m.payload.assistant_message.id\n            trace = m.payload.assistant_message.trace_url\n        elif m.event_type == ChatEventType.ERROR_PART:\n            raise HTTPException(\n                status_code=HTTPStatus.INTERNAL_SERVER_ERROR,\n                detail=m.payload,\n            )\n        else:\n            pass\n    return ChatResult(\n        chat_id=chat_id,\n        message_id=message_id,\n        trace=trace,\n        sources=sources,\n        content=content,\n    )\n\n\ndef user_can_view_chat(chat: DBChat, user: Optional[User]) -> bool:\n    # Anonymous or public chat can be accessed by anyone\n    # Non-anonymous chat can be accessed by owner or superuser\n    if not chat.user_id or chat.visibility == ChatVisibility.PUBLIC:\n        return True\n    return user is not None and (user.is_superuser or chat.user_id == user.id)\n\n\ndef user_can_edit_chat(chat: DBChat, user: Optional[User]) -> bool:\n    if user is None:\n        return False\n    if user.is_superuser:\n        return True\n    return chat.user_id == user.id\n\n\ndef get_graph_data_from_chat_message(\n    db_session: Session,\n    chat_message: DBChatMessage,\n    engine_config: ChatEngineConfig,\n) -> Optional[KnowledgeGraphRetrievalResult]:\n    if not chat_message.graph_data:\n        return None\n\n    graph_data = chat_message.graph_data\n\n    # For forward compatibility.\n    if \"version\" not in graph_data:\n        kb = engine_config.get_knowledge_bases(db_session)[0]\n        graph_store = get_kb_tidb_graph_store(db_session, kb)\n        return 
graph_store.get_subgraph_by_relationship_ids(graph_data[\"relationships\"])\n\n    # Stored Knowledge Graph -> Retrieved Knowledge Graph\n    stored_kg = StoredKnowledgeGraph.model_validate(graph_data)\n    if stored_kg.knowledge_base_id is not None:\n        kb = knowledge_base_repo.must_get(db_session, stored_kg.knowledge_base_id)\n        graph_store = get_kb_tidb_graph_store(db_session, kb)\n        retrieved_kg = graph_store.get_subgraph_by_relationship_ids(\n            ids=stored_kg.relationships, query=stored_kg.query\n        )\n        return retrieved_kg\n    elif stored_kg.knowledge_base_ids is not None:\n        kg_store_map = {}\n        knowledge_base_set = set()\n        relationship_set = set()\n        entity_set = set()\n        subgraphs = []\n\n        for kb_id in stored_kg.knowledge_base_ids:\n            kb = knowledge_base_repo.must_get(db_session, kb_id)\n            knowledge_base_set.add(kb.to_descriptor())\n            kg_store = get_kb_tidb_graph_store(db_session, kb)\n            kg_store_map[kb_id] = kg_store\n\n        for stored_subgraph in stored_kg.subgraphs:\n            kg_store = kg_store_map.get(stored_subgraph.knowledge_base_id)\n            if kg_store is None:\n                continue\n            relationship_ids = stored_subgraph.relationships\n            subgraph = kg_store.get_subgraph_by_relationship_ids(\n                ids=relationship_ids,\n                query=stored_kg.query,\n            )\n            relationship_set.update(subgraph.relationships)\n            entity_set.update(subgraph.entities)\n            subgraphs.append(\n                RetrievedSubGraph(\n                    **subgraph.model_dump(),\n                )\n            )\n\n        return KnowledgeGraphRetrievalResult(\n            query=stored_kg.query,\n            knowledge_bases=list(knowledge_base_set),\n            relationships=list(relationship_set),\n            entities=list(entity_set),\n            subgraphs=subgraphs,\n     
   )\n    else:\n        return None\n\n\ndef get_chat_message_subgraph(\n    db_session: Session, chat_message: DBChatMessage\n) -> KnowledgeGraphRetrievalResult:\n    chat_engine: ChatEngine = chat_message.chat.engine\n    engine_name = chat_engine.name\n    engine_config = ChatEngineConfig.load_from_db(db_session, chat_engine.name)\n\n    # Try to get subgraph from `chat_message.graph_data`.\n    try:\n        knowledge_graph = get_graph_data_from_chat_message(\n            db_session, chat_message, engine_config\n        )\n        if knowledge_graph is not None:\n            return knowledge_graph\n    except Exception as e:\n        logger.error(\n            f\"Failed to get subgraph from chat_message.graph_data: {e}\", exc_info=True\n        )\n\n    # Try to get subgraph based on the chat message content.\n    # Notice: it use current chat engine config, not the snapshot stored in chat_message.\n    retriever = RetrieveFlow(\n        db_session=db_session,\n        engine_name=engine_name,\n        engine_config=engine_config,\n    )\n    knowledge_graph, _ = retriever.search_knowledge_graph(chat_message.content)\n    return knowledge_graph\n\n\ndef check_rag_required_config(session: Session) -> RequiredConfigStatus:\n    \"\"\"\n    Check if the required configuration items have been configured, it any of them is\n    missing, the RAG application can not complete its work.\n    \"\"\"\n    has_default_llm = llm_repo.has_default(session)\n    has_default_embedding_model = embedding_model_repo.has_default(session)\n    has_default_chat_engine = chat_engine_repo.has_default(session)\n    has_knowledge_base = session.scalar(select(func.count(DBKnowledgeBase.id))) > 0\n\n    return RequiredConfigStatus(\n        default_llm=has_default_llm,\n        default_embedding_model=has_default_embedding_model,\n        default_chat_engine=has_default_chat_engine,\n        knowledge_base=has_knowledge_base,\n    )\n\n\ndef check_rag_optional_config(session: Session) -> 
OptionalConfigStatus:\n    langfuse = bool(\n        SiteSetting.langfuse_host\n        and SiteSetting.langfuse_secret_key\n        and SiteSetting.langfuse_public_key\n    )\n    default_reranker = session.scalar(select(func.count(DBRerankerModel.id))) > 0\n    return OptionalConfigStatus(\n        langfuse=langfuse,\n        default_reranker=default_reranker,\n    )\n\n\ndef check_rag_config_need_migration(session: Session) -> NeedMigrationStatus:\n    \"\"\"\n    Check if any configuration needs to be migrated.\n    \"\"\"\n    chat_engines_without_kb_configured = session.exec(\n        select(ChatEngine.id)\n        .where(ChatEngine.deleted_at == None)\n        .where(\n            text(\n                \"JSON_EXTRACT(engine_options, '$.knowledge_base.linked_knowledge_bases') IS NULL AND \"\n                \"JSON_EXTRACT(engine_options, '$.knowledge_base.linked_knowledge_base') IS NULL\"\n            )\n        )\n    )\n\n    return NeedMigrationStatus(\n        chat_engines_without_kb_configured=chat_engines_without_kb_configured,\n    )\n\n\ndef remove_chat_message_recommend_questions(\n    db_session: Session,\n    chat_message_id: int,\n) -> None:\n    delete_stmt = delete(RecommendQuestion).where(\n        RecommendQuestion.chat_message_id == chat_message_id\n    )\n    db_session.exec(delete_stmt)\n    db_session.commit()\n\n\ndef get_chat_message_recommend_questions(\n    db_session: Session,\n    chat_message: DBChatMessage,\n    engine_name: str = \"default\",\n) -> List[str]:\n    chat_engine_config = ChatEngineConfig.load_from_db(db_session, engine_name)\n    llm = chat_engine_config.get_llama_llm(db_session)\n\n    statement = (\n        select(RecommendQuestion.questions)\n        .where(RecommendQuestion.chat_message_id == chat_message.id)\n        .with_for_update()  # using write lock in case the same chat message trigger multiple requests\n    )\n\n    questions = db_session.exec(statement).first()\n    if questions is not None:\n        
return questions\n\n    prompt_template = RichPromptTemplate(\n        chat_engine_config.llm.further_questions_prompt\n    )\n    recommend_questions = llm.predict(\n        prompt_template,\n        chat_message_content=chat_message.content,\n    )\n    recommend_question_list = recommend_questions.splitlines()\n    recommend_question_list = [\n        question.strip() for question in recommend_question_list if question.strip()\n    ]\n\n    longest_question = 0\n    for question in recommend_question_list:\n        longest_question = max(longest_question, len(question))\n\n    # check the output by if the output with format and the length\n    if (\n        \"##\" in recommend_questions\n        or \"**\" in recommend_questions\n        or longest_question > 500\n    ):\n        regenerate_content = f\"\"\"\n        Please note that you are generating a question list. You previously generated it incorrectly; try again.\n        ----------------------------------------\n        {chat_message.content}\n        \"\"\"\n        # with format or too long for per question, it's not a question list, generate again\n        recommend_questions = llm.predict(\n            prompt_template,\n            chat_message_content=regenerate_content,\n        )\n\n    db_session.add(\n        RecommendQuestion(\n            chat_message_id=chat_message.id,\n            questions=recommend_question_list,\n        )\n    )\n    db_session.commit()\n\n    return recommend_question_list\n"
  },
  {
    "path": "backend/app/rag/chat/config.py",
    "content": "import logging\nimport dspy\n\nfrom typing import Optional, List\nfrom pydantic import BaseModel, Field\nfrom sqlmodel import Session\n\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.llms.llm import LLM\n\nfrom app.rag.postprocessors.metadata_post_filter import MetadataPostFilter\nfrom app.rag.retrievers.chunk.schema import VectorSearchRetrieverConfig\nfrom app.rag.retrievers.knowledge_graph.schema import KnowledgeGraphRetrieverConfig\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\nfrom app.rag.llms.resolver import get_default_llm, resolve_llm\nfrom app.rag.rerankers.resolver import get_default_reranker_model, resolve_reranker\n\nfrom app.models import (\n    LLM as DBLLM,\n    RerankerModel as DBRerankerModel,\n    KnowledgeBase,\n    ChatEngine as DBChatEngine,\n)\nfrom app.repositories import chat_engine_repo, knowledge_base_repo\nfrom app.rag.default_prompt import (\n    DEFAULT_INTENT_GRAPH_KNOWLEDGE,\n    DEFAULT_NORMAL_GRAPH_KNOWLEDGE,\n    DEFAULT_CONDENSE_QUESTION_PROMPT,\n    DEFAULT_TEXT_QA_PROMPT,\n    DEFAULT_FURTHER_QUESTIONS_PROMPT,\n    DEFAULT_GENERATE_GOAL_PROMPT,\n    DEFAULT_CLARIFYING_QUESTION_PROMPT,\n)\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass LLMOption(BaseModel):\n    intent_graph_knowledge: str = DEFAULT_INTENT_GRAPH_KNOWLEDGE\n    normal_graph_knowledge: str = DEFAULT_NORMAL_GRAPH_KNOWLEDGE\n    condense_question_prompt: str = DEFAULT_CONDENSE_QUESTION_PROMPT\n    clarifying_question_prompt: str = DEFAULT_CLARIFYING_QUESTION_PROMPT\n    text_qa_prompt: str = DEFAULT_TEXT_QA_PROMPT\n    further_questions_prompt: str = DEFAULT_FURTHER_QUESTIONS_PROMPT\n    generate_goal_prompt: str = DEFAULT_GENERATE_GOAL_PROMPT\n\n\nclass VectorSearchOption(VectorSearchRetrieverConfig):\n    pass\n\n\nclass KnowledgeGraphOption(KnowledgeGraphRetrieverConfig):\n    enabled: bool = True\n    using_intent_search: bool = True\n\n\nclass ExternalChatEngine(BaseModel):\n    # 
TODO: add enable flag for this config.\n    stream_chat_api_url: str = None\n\n\nclass LinkedKnowledgeBase(BaseModel):\n    id: int\n\n\nclass KnowledgeBaseOption(BaseModel):\n    linked_knowledge_base: LinkedKnowledgeBase = None\n    linked_knowledge_bases: Optional[List[LinkedKnowledgeBase]] = Field(\n        default_factory=list\n    )\n\n\nclass ChatEngineConfig(BaseModel):\n    external_engine_config: Optional[ExternalChatEngine] = None\n\n    llm: LLMOption = LLMOption()\n\n    knowledge_base: KnowledgeBaseOption = KnowledgeBaseOption()\n    knowledge_graph: KnowledgeGraphOption = KnowledgeGraphOption()\n    vector_search: VectorSearchOption = VectorSearchOption()\n\n    refine_question_with_kg: bool = True\n    clarify_question: bool = False\n    further_questions: bool = False\n\n    post_verification_url: Optional[str] = None\n    post_verification_token: Optional[str] = None\n    hide_sources: bool = False\n\n    _db_chat_engine: Optional[DBChatEngine] = None\n    _db_llm: Optional[DBLLM] = None\n    _db_fast_llm: Optional[DBLLM] = None\n    _db_reranker: Optional[DBRerankerModel] = None\n\n    @property\n    def is_external_engine(self) -> bool:\n        return (\n            self.external_engine_config is not None\n            and self.external_engine_config.stream_chat_api_url\n        )\n\n    def get_db_chat_engine(self) -> Optional[DBChatEngine]:\n        return self._db_chat_engine\n\n    def get_linked_knowledge_base(self, session: Session) -> KnowledgeBase | None:\n        if not self.knowledge_base:\n            return None\n        return knowledge_base_repo.must_get(\n            session, self.knowledge_base.linked_knowledge_base.id\n        )\n\n    @classmethod\n    def load_from_db(cls, session: Session, engine_name: str) -> \"ChatEngineConfig\":\n        if not engine_name or engine_name == \"default\":\n            db_chat_engine = chat_engine_repo.get_default_engine(session)\n        else:\n            db_chat_engine = 
chat_engine_repo.get_engine_by_name(session, engine_name)\n\n        if not db_chat_engine:\n            logger.warning(\n                f\"Chat engine {engine_name} not found in DB, using default engine\"\n            )\n            db_chat_engine = chat_engine_repo.get_default_engine(session)\n\n        obj = cls.model_validate(db_chat_engine.engine_options)\n        obj._db_chat_engine = db_chat_engine\n        obj._db_llm = db_chat_engine.llm\n        obj._db_fast_llm = db_chat_engine.fast_llm\n        obj._db_reranker = db_chat_engine.reranker\n        return obj\n\n    def get_llama_llm(self, session: Session) -> LLM:\n        if not self._db_llm:\n            return get_default_llm(session)\n        return resolve_llm(\n            self._db_llm.provider,\n            self._db_llm.model,\n            self._db_llm.config,\n            self._db_llm.credentials,\n        )\n\n    def get_dspy_lm(self, session: Session) -> dspy.LM:\n        llama_llm = self.get_llama_llm(session)\n        return get_dspy_lm_by_llama_llm(llama_llm)\n\n    def get_fast_llama_llm(self, session: Session) -> LLM:\n        if not self._db_fast_llm:\n            return get_default_llm(session)\n        return resolve_llm(\n            self._db_fast_llm.provider,\n            self._db_fast_llm.model,\n            self._db_fast_llm.config,\n            self._db_fast_llm.credentials,\n        )\n\n    def get_fast_dspy_lm(self, session: Session) -> dspy.LM:\n        llama_llm = self.get_fast_llama_llm(session)\n        return get_dspy_lm_by_llama_llm(llama_llm)\n\n    # FIXME: Reranker top_n should be config in the retrieval config.\n    def get_reranker(\n        self, session: Session, top_n: int = None\n    ) -> Optional[BaseNodePostprocessor]:\n        if not self._db_reranker:\n            return get_default_reranker_model(session, top_n)\n\n        top_n = self._db_reranker.top_n if top_n is None else top_n\n        return resolve_reranker(\n            self._db_reranker.provider,\n 
           self._db_reranker.model,\n            top_n,\n            self._db_reranker.config,\n            self._db_reranker.credentials,\n        )\n\n    def get_metadata_filter(self) -> BaseNodePostprocessor:\n        return MetadataPostFilter(self.vector_search.metadata_filters)\n\n    def get_knowledge_bases(self, db_session: Session) -> List[KnowledgeBase]:\n        if not self.knowledge_base:\n            return []\n        kb_config: KnowledgeBaseOption = self.knowledge_base\n        linked_knowledge_base_ids = []\n        if len(kb_config.linked_knowledge_bases) == 0:\n            linked_knowledge_base_ids.append(kb_config.linked_knowledge_base.id)\n        else:\n            linked_knowledge_base_ids.extend(\n                [kb.id for kb in kb_config.linked_knowledge_bases]\n            )\n        knowledge_bases = knowledge_base_repo.get_by_ids(\n            db_session, knowledge_base_ids=linked_knowledge_base_ids\n        )\n        return knowledge_bases\n\n    def screenshot(self) -> dict:\n        return self.model_dump(\n            exclude={\n                \"llm\": [\n                    \"condense_question_prompt\",\n                    \"text_qa_prompt\",\n                    \"refine_prompt\",\n                    \"intent_graph_knowledge\",\n                    \"normal_graph_knowledge\",\n                    \"generate_goal_prompt\",\n                    \"further_questions_prompt\",\n                    \"clarifying_question_prompt\",\n                ],\n                \"post_verification_token\": True,\n            }\n        )\n"
  },
  {
    "path": "backend/app/rag/chat/retrieve/retrieve_flow.py",
    "content": "import logging\nfrom datetime import datetime\nfrom typing import List, Optional, Tuple\n\nfrom llama_index.core.instrumentation import get_dispatcher\nfrom llama_index.core.llms import LLM\nfrom llama_index.core.schema import NodeWithScore, QueryBundle\nfrom llama_index.core.prompts.rich import RichPromptTemplate\nfrom pydantic import BaseModel\nfrom sqlmodel import Session\n\nfrom app.models import (\n    Document as DBDocument,\n    KnowledgeBase,\n)\nfrom app.rag.chat.config import ChatEngineConfig\nfrom app.rag.retrievers.knowledge_graph.fusion_retriever import (\n    KnowledgeGraphFusionRetriever,\n)\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrievalResult,\n    KnowledgeGraphRetrieverConfig,\n)\nfrom app.rag.retrievers.chunk.fusion_retriever import ChunkFusionRetriever\nfrom app.repositories import document_repo\n\ndispatcher = get_dispatcher(__name__)\nlogger = logging.getLogger(__name__)\n\n\nclass SourceDocument(BaseModel):\n    id: int\n    name: str\n    source_uri: Optional[str] = None\n\n\nclass RetrieveFlow:\n    def __init__(\n        self,\n        db_session: Session,\n        engine_name: str = \"default\",\n        engine_config: Optional[ChatEngineConfig] = None,\n        llm: Optional[LLM] = None,\n        fast_llm: Optional[LLM] = None,\n        knowledge_bases: Optional[List[KnowledgeBase]] = None,\n    ):\n        self.db_session = db_session\n        self.engine_name = engine_name\n        self.engine_config = engine_config or ChatEngineConfig.load_from_db(\n            db_session, engine_name\n        )\n        self.db_chat_engine = self.engine_config.get_db_chat_engine()\n\n        # Init LLM.\n        self._llm = llm or self.engine_config.get_llama_llm(self.db_session)\n        self._fast_llm = fast_llm or self.engine_config.get_fast_llama_llm(\n            self.db_session\n        )\n\n        # Load knowledge bases.\n        self.knowledge_bases = (\n            knowledge_bases or 
self.engine_config.get_knowledge_bases(self.db_session)\n        )\n        self.knowledge_base_ids = [kb.id for kb in self.knowledge_bases]\n\n    def retrieve(self, user_question: str) -> List[NodeWithScore]:\n        if self.engine_config.refine_question_with_kg:\n            # 1. Retrieve Knowledge graph related to the user question.\n            _, knowledge_graph_context = self.search_knowledge_graph(user_question)\n\n            # 2. Refine the user question using knowledge graph and chat history.\n            self._refine_user_question(user_question, knowledge_graph_context)\n\n        # 3. Search relevant chunks based on the user question.\n        return self.search_relevant_chunks(user_question=user_question)\n\n    def retrieve_documents(self, user_question: str) -> List[DBDocument]:\n        nodes = self.retrieve(user_question)\n        return self.get_documents_from_nodes(nodes)\n\n    def search_knowledge_graph(\n        self, user_question: str\n    ) -> Tuple[KnowledgeGraphRetrievalResult, str]:\n        kg_config = self.engine_config.knowledge_graph\n        knowledge_graph = KnowledgeGraphRetrievalResult()\n        knowledge_graph_context = \"\"\n        if kg_config is not None and kg_config.enabled:\n            kg_retriever = KnowledgeGraphFusionRetriever(\n                db_session=self.db_session,\n                knowledge_base_ids=[kb.id for kb in self.knowledge_bases],\n                llm=self._llm,\n                use_query_decompose=kg_config.using_intent_search,\n                config=KnowledgeGraphRetrieverConfig.model_validate(\n                    kg_config.model_dump(exclude={\"enabled\", \"using_intent_search\"})\n                ),\n            )\n            knowledge_graph = kg_retriever.retrieve_knowledge_graph(user_question)\n            knowledge_graph_context = self._get_knowledge_graph_context(knowledge_graph)\n        return knowledge_graph, knowledge_graph_context\n\n    def _get_knowledge_graph_context(\n        
self, knowledge_graph: KnowledgeGraphRetrievalResult\n    ) -> str:\n        if self.engine_config.knowledge_graph.using_intent_search:\n            kg_context_template = RichPromptTemplate(\n                self.engine_config.llm.intent_graph_knowledge\n            )\n            return kg_context_template.format(\n                sub_queries=knowledge_graph.to_subqueries_dict(),\n            )\n        else:\n            kg_context_template = RichPromptTemplate(\n                self.engine_config.llm.normal_graph_knowledge\n            )\n            return kg_context_template.format(\n                entities=knowledge_graph.entities,\n                relationships=knowledge_graph.relationships,\n            )\n\n    def _refine_user_question(\n        self, user_question: str, knowledge_graph_context: str\n    ) -> str:\n        prompt_template = RichPromptTemplate(\n            self.engine_config.llm.condense_question_prompt\n        )\n        refined_question = self._fast_llm.predict(\n            prompt_template,\n            graph_knowledges=knowledge_graph_context,\n            question=user_question,\n            current_date=datetime.now().strftime(\"%Y-%m-%d\"),\n        )\n        return refined_question.strip().strip(\".\\\"'!\")\n\n    def search_relevant_chunks(self, user_question: str) -> List[NodeWithScore]:\n        retriever = ChunkFusionRetriever(\n            db_session=self.db_session,\n            knowledge_base_ids=self.knowledge_base_ids,\n            llm=self._llm,\n            config=self.engine_config.vector_search,\n            use_query_decompose=False,\n        )\n        return retriever.retrieve(QueryBundle(user_question))\n\n    def get_documents_from_nodes(self, nodes: List[NodeWithScore]) -> List[DBDocument]:\n        document_ids = [n.node.metadata[\"document_id\"] for n in nodes]\n        documents = document_repo.fetch_by_ids(self.db_session, document_ids)\n        # Keep the original order of document ids, which is sorted 
by similarity.\n        return sorted(documents, key=lambda x: document_ids.index(x.id))\n\n    def get_source_documents_from_nodes(\n        self, nodes: List[NodeWithScore]\n    ) -> List[SourceDocument]:\n        documents = self.get_documents_from_nodes(nodes)\n        return [\n            SourceDocument(\n                id=doc.id,\n                name=doc.name,\n                source_uri=doc.source_uri,\n            )\n            for doc in documents\n        ]\n"
  },
  {
    "path": "backend/app/rag/chat/stream_protocol.py",
    "content": "import json\nfrom dataclasses import dataclass\n\nfrom pydantic import BaseModel\n\nfrom app.models import ChatMessage, Chat\nfrom app.rag.types import ChatEventType, ChatMessageSate\n\n\nclass ChatStreamPayload:\n    def dump(self):\n        pass\n\n\n@dataclass\nclass ChatStreamDataPayload(ChatStreamPayload):\n    chat: Chat\n    user_message: ChatMessage\n    assistant_message: ChatMessage\n\n    def dump(self):\n        return [\n            {\n                \"chat\": self.chat.model_dump(mode=\"json\"),\n                \"user_message\": self.user_message.model_dump(mode=\"json\"),\n                \"assistant_message\": self.assistant_message.model_dump(mode=\"json\"),\n            }\n        ]\n\n\n@dataclass\nclass ChatStreamMessagePayload(ChatStreamPayload):\n    state: ChatMessageSate = ChatMessageSate.TRACE\n    display: str = \"\"\n    context: dict | list | str | BaseModel | None = None\n    message: str = \"\"\n\n    def dump(self):\n        if isinstance(self.context, list):\n            context = [c.model_dump() for c in self.context]\n        elif isinstance(self.context, BaseModel):\n            context = self.context.model_dump()\n        else:\n            context = self.context\n\n        return [\n            {\n                \"state\": self.state.name,\n                \"display\": self.display,\n                \"context\": context,\n                \"message\": self.message,\n            }\n        ]\n\n\n@dataclass\nclass ChatEvent:\n    event_type: ChatEventType\n    payload: str | ChatStreamPayload | None = None\n\n    def encode(self, charset) -> bytes:\n        body = self.payload\n\n        if isinstance(body, ChatStreamPayload):\n            body = body.dump()\n\n        body = json.dumps(body, separators=(\",\", \":\"))\n\n        return f\"{self.event_type.value}:{body}\\n\".encode(charset)\n"
  },
  {
    "path": "backend/app/rag/datasource/__init__.py",
    "content": "from sqlmodel import Session\nfrom typing import Any\nfrom uuid import UUID\n\nfrom app.models import DataSourceType\nfrom .base import BaseDataSource\nfrom .file import FileDataSource\nfrom .web_sitemap import WebSitemapDataSource\nfrom .web_single_page import WebSinglePageDataSource\n\n\ndef get_data_source_loader(\n    session: Session,\n    knowledge_base_id: int,\n    data_source_type: DataSourceType,\n    data_source_id: int,\n    user_id: UUID,\n    config: Any,\n) -> BaseDataSource:\n    data_source_cls = None\n\n    match data_source_type:\n        case DataSourceType.FILE:\n            data_source_cls = FileDataSource\n        case DataSourceType.WEB_SITEMAP:\n            data_source_cls = WebSitemapDataSource\n        case DataSourceType.WEB_SINGLE_PAGE:\n            data_source_cls = WebSinglePageDataSource\n        case _:\n            raise ValueError(\"Data source type not supported\")\n\n    return data_source_cls(session, knowledge_base_id, data_source_id, user_id, config)\n"
  },
  {
    "path": "backend/app/rag/datasource/base.py",
    "content": "from abc import ABC, abstractmethod\nfrom uuid import UUID\nfrom typing import Generator, Any\nfrom sqlmodel import Session\n\nfrom app.models import Document\n\n\nclass BaseDataSource(ABC):\n    session: Session\n    knowledge_base_id: int\n    data_source_id: int\n    user_id: UUID\n    config: Any\n\n    def __init__(\n        self,\n        session: Session,\n        knowledge_base_id: int,\n        data_source_id: int,\n        user_id: UUID,\n        config: Any,\n        **kwargs,\n    ):\n        self.config = config\n        self.session = session\n        self.knowledge_base_id = knowledge_base_id\n        self.data_source_id = data_source_id\n        self.user_id = user_id\n        self.validate_config()\n\n    @abstractmethod\n    def validate_config(self):\n        raise NotImplementedError\n\n    @abstractmethod\n    def load_documents(self) -> Generator[Document, None, None]:\n        raise NotImplementedError\n"
  },
  {
    "path": "backend/app/rag/datasource/consts.py",
    "content": "IGNORE_TAGS = [\n    \"noscript\",\n    \"title\",\n    \"script\",\n    \"style\",\n    \"meta\",\n    \"head\",\n    \"header\",\n    \"footer\",\n    \"nav\",\n    \"symbol\",\n    \"aside\",\n]\nIGNORE_CLASSES = [\"header\", \"footer\", \"sidebar\"]\n"
  },
  {
    "path": "backend/app/rag/datasource/file.py",
    "content": "import logging\nimport docx\nimport pptx\nimport openpyxl\nfrom pydantic import BaseModel\nfrom typing import Generator, IO\nfrom pypdf import PdfReader\n\nfrom app.models import Document, Upload\nfrom app.file_storage import default_file_storage\nfrom app.types import MimeTypes\nfrom .base import BaseDataSource\n\nlogger = logging.getLogger(__name__)\n\n\nclass FileConfig(BaseModel):\n    file_id: int\n\n\nclass FileDataSource(BaseDataSource):\n    def validate_config(self):\n        if not isinstance(self.config, list):\n            raise ValueError(\"config must be a list\")\n        for f_config in self.config:\n            FileConfig.model_validate(f_config)\n\n    def load_documents(self) -> Generator[Document, None, None]:\n        for f_config in self.config:\n            upload_id = f_config[\"file_id\"]\n            upload = self.session.get(Upload, upload_id)\n            if upload is None:\n                logger.error(f\"Upload with id {upload_id} not found\")\n                continue\n\n            with default_file_storage.open(upload.path) as f:\n                if upload.mime_type == MimeTypes.PDF:\n                    content = extract_text_from_pdf(f)\n                    mime_type = MimeTypes.PLAIN_TXT\n                elif upload.mime_type == MimeTypes.DOCX:\n                    content = extract_text_from_docx(f)\n                    mime_type = MimeTypes.PLAIN_TXT\n                elif upload.mime_type == MimeTypes.PPTX:\n                    content = extract_text_from_pptx(f)\n                    mime_type = MimeTypes.PLAIN_TXT\n                elif upload.mime_type == MimeTypes.XLSX:\n                    content = extract_text_from_xlsx(f)\n                    mime_type = MimeTypes.PLAIN_TXT\n                elif upload.mime_type == MimeTypes.MARKDOWN:\n                    content = f.read()\n                    mime_type = MimeTypes.MARKDOWN\n                else:\n                    content = f.read()\n                   
 mime_type = upload.mime_type\n\n            document = Document(\n                name=upload.name,\n                hash=hash(content),\n                content=content,\n                mime_type=mime_type,\n                knowledge_base_id=self.knowledge_base_id,\n                data_source_id=self.data_source_id,\n                user_id=self.user_id,\n                source_uri=upload.path,\n                last_modified_at=upload.created_at,\n            )\n            yield document\n\n\ndef extract_text_from_pdf(file: IO) -> str:\n    reader = PdfReader(file)\n    return \"\\n\\n\".join([page.extract_text() for page in reader.pages])\n\n\ndef extract_text_from_docx(file: IO) -> str:\n    document = docx.Document(file)\n    full_text = []\n    for paragraph in document.paragraphs:\n        full_text.append(paragraph.text)\n    return \"\\n\\n\".join(full_text)\n\n\ndef extract_text_from_pptx(file: IO) -> str:\n    presentation = pptx.Presentation(file)\n    full_text = []\n    for slide in presentation.slides:\n        for shape in slide.shapes:\n            if hasattr(shape, \"text\"):\n                full_text.append(shape.text)\n    return \"\\n\\n\".join(full_text)\n\n\ndef extract_text_from_xlsx(file: IO) -> str:\n    wb = openpyxl.load_workbook(file)\n    full_text = []\n    for sheet in wb.worksheets:\n        full_text.append(f\"Sheet: {sheet.title}\")\n        sheet_string = \"\\n\".join(\n            \",\".join(map(str, row)) for row in sheet.iter_rows(values_only=True)\n        )\n        full_text.append(sheet_string)\n    return \"\\n\\n\".join(full_text)\n"
  },
  {
    "path": "backend/app/rag/datasource/web_base.py",
    "content": "import logging\nfrom datetime import datetime, UTC\nfrom typing import Generator\nfrom playwright.sync_api import sync_playwright\nfrom bs4 import BeautifulSoup\nfrom markdownify import MarkdownConverter\n\nfrom app.models import Document\nfrom app.rag.datasource.consts import IGNORE_TAGS, IGNORE_CLASSES\n\nlogger = logging.getLogger(__name__)\n\n\ndef load_web_documents(\n    knowledge_base_id: int, data_source_id: int, urls: list[str]\n) -> Generator[Document, None, None]:\n    visited = set()\n    with sync_playwright() as p:\n        browser = p.chromium.launch(headless=True)\n        for url in urls:\n            page = browser.new_page()\n            response = page.goto(url)\n            final_url = page.url\n            if final_url in visited:\n                continue\n\n            if response.status >= 400:\n                logger.error(\n                    f\"Failed to load page: {url}, response status: {response.status()}, skipping\"\n                )\n                continue\n            soup = BeautifulSoup(page.content(), \"html.parser\")\n            for t in IGNORE_TAGS:\n                for tag in soup.find_all(t):\n                    tag.extract()\n            for c in IGNORE_CLASSES:\n                for tag in soup.find_all(class_=c):\n                    tag.extract()\n            content = MarkdownConverter().convert_soup(soup)\n            title = page.title()\n            visited.add(final_url)\n            document = Document(\n                name=title,\n                hash=hash(content),\n                content=content,\n                mime_type=\"text/plain\",\n                knowledge_base_id=knowledge_base_id,\n                data_source_id=data_source_id,\n                source_uri=final_url,\n                last_modified_at=datetime.now(UTC),\n            )\n            yield document\n        browser.close()\n"
  },
  {
    "path": "backend/app/rag/datasource/web_single_page.py",
    "content": "import logging\nfrom pydantic import BaseModel\nfrom typing import Generator, List\n\nfrom app.models import Document\nfrom app.rag.datasource.base import BaseDataSource\nfrom app.rag.datasource.web_base import load_web_documents\n\nlogger = logging.getLogger(__name__)\n\n\nclass WebSinglePageConfig(BaseModel):\n    urls: List[str]\n\n\nclass WebSinglePageDataSource(BaseDataSource):\n    def validate_config(self):\n        WebSinglePageConfig.model_validate(self.config)\n\n    def load_documents(self) -> Generator[Document, None, None]:\n        if \"url\" in self.config:\n            # TODO: remove this once we have a proper config\n            urls = [self.config[\"url\"]]\n        else:\n            urls = self.config[\"urls\"]\n\n        yield from load_web_documents(self.knowledge_base_id, self.data_source_id, urls)\n"
  },
  {
    "path": "backend/app/rag/datasource/web_sitemap.py",
    "content": "import logging\nfrom typing import Generator\nfrom urllib.parse import urlparse, urljoin\n\nimport requests\nfrom pydantic import BaseModel\nfrom bs4 import BeautifulSoup\n\nfrom app.models import Document\nfrom app.rag.datasource.base import BaseDataSource\nfrom app.rag.datasource.web_base import load_web_documents\n\nlogger = logging.getLogger(__name__)\n\n\nclass WebSitemapConfig(BaseModel):\n    url: str\n\n\ndef _ensure_absolute_url(source_url: str, maybe_relative_url: str) -> str:\n    if not urlparse(maybe_relative_url).netloc:\n        return urljoin(source_url, maybe_relative_url)\n    return maybe_relative_url\n\n\ndef extract_urls_from_sitemap(sitemap_url: str) -> list[str]:\n    response = requests.get(sitemap_url)\n    response.raise_for_status()\n\n    soup = BeautifulSoup(response.content, \"html.parser\")\n    result = [\n        _ensure_absolute_url(sitemap_url, loc_tag.text)\n        for loc_tag in soup.find_all(\"loc\")\n    ]\n    if not result:\n        raise ValueError(f\"No URLs found in sitemap {sitemap_url}\")\n    return result\n\n\nclass WebSitemapDataSource(BaseDataSource):\n    def validate_config(self):\n        WebSitemapConfig.model_validate(self.config)\n\n    def load_documents(self) -> Generator[Document, None, None]:\n        sitemap_url = self.config[\"url\"]\n        urls = extract_urls_from_sitemap(sitemap_url)\n        logger.info(f\"Found {len(urls)} URLs in sitemap {sitemap_url}\")\n        yield from load_web_documents(self.knowledge_base_id, self.data_source_id, urls)\n"
  },
  {
    "path": "backend/app/rag/default_prompt.py",
    "content": "DEFAULT_INTENT_GRAPH_KNOWLEDGE = \"\"\"\\\nGiven a list of prerequisite questions and their relevant knowledge for the user's main question, when conflicts in meaning arise, prioritize the relationship with the higher weight and the more recent version.\n\nKnowledge sub-queries:\n\n{% for sub_query, data in sub_queries.items() %}\n\nSub-query: {{ sub_query }}\n\n  - Entities:\n{% for entity in data['entities'] %}\n    - Name: {{ entity.name }}\n      Description: {{ entity.description }}\n{% endfor %}\n\n  - Relationships:\n{% for relationship in data['relationships'] %}\n    - Description: {{ relationship.rag_description }}\n      Weight: {{ relationship.weight }}\n{% endfor %}\n\n{% endfor %}\n\"\"\"\n\nDEFAULT_NORMAL_GRAPH_KNOWLEDGE = \"\"\"\\\nGiven a list of relationships of a knowledge graph as follows. When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\n\n---------------------\nEntities:\n\n{% for entity in entities %}\n- Name: {{ entity.name }}\n  Description: {{ entity.description }}\n{% endfor %}\n\n---------------------\n\nKnowledge relationships:\n\n{% for relationship in relationships %}\n\n- Description: {{ relationship.rag_description }}\n- Weight: {{ relationship.weight }}\n- Last Modified At: {{ relationship.last_modified_at }}\n- Meta: {{ relationship.meta | tojson(indent=2) }}\n\n{% endfor %}\n\"\"\"\n\nDEFAULT_CLARIFYING_QUESTION_PROMPT = \"\"\"\\\n---------------------\nThe prerequisite questions and their relevant knowledge for the user's main question.\n---------------------\n\n{{graph_knowledges}}\n\n---------------------\n\nTask:\nGiven the conversation between the user and ASSISTANT, along with the follow-up message from the user, and the provided prerequisite questions and relevant knowledge, determine if the user's question is clear and specific enough for a confident response. 
\n\nIf the question lacks necessary details or context, identify the specific ambiguities and generate a clarifying question to address them.\nIf the question is clear and answerable, return exactly \"False\" as the response.\n\nInstructions:\n1. Assess Information Sufficiency:\n   - Evaluate if the user's question provides enough detail to generate a precise answer based on the prerequisite questions, relevant knowledge, and conversation history.\n   - If the user's question is too vague or lacks key information, identify what additional information would be necessary for clarity.\n\n2. Generate a Clarifying Question:\n   - If the question is clear and answerable, return exactly \"False\" as the response.\n   - If clarification is needed, return a specific question to ask the user, directly addressing the information gap. Avoid general questions; focus on the specific details required for an accurate answer.\n\n3. Use the same language to ask the clarifying question as the user's original question.\n\nExample 1:\n\nuser: \"Does TiDB support foreign keys?\"\nRelevant Knowledge: TiDB supports foreign keys starting from version 6.6.0.\n\nResponse:\n\nWhich version of TiDB are you using?\n\nExample 2:\n\nuser: \"Does TiDB support nested transaction?\"\nRelevant Knowledge: TiDB supports nested transaction starting from version 6.2.0.\n\nResponse:\n\nWhich version of TiDB are you using?\n\nExample 3:\n\nuser: \"Does TiDB support foreign keys? 
I'm using TiDB 6.5.0.\"\nRelevant Knowledge: TiDB supports foreign keys starting from version 6.6.0.\n\nResponse:\n\nFalse\n\nYour Turn:\n\nChat history:\n\n{{chat_history}}\n\n---------------------\n\nFollow-up question:\n\n{{question}}\n\nResponse:\n\"\"\"\n\nDEFAULT_CONDENSE_QUESTION_PROMPT = \"\"\"\\\nCurrent Date: {{current_date}}\n---------------------\nThe prerequisite questions and their relevant knowledge for the user's main question.\n---------------------\n\n{{graph_knowledges}}\n\n---------------------\n\nTask:\nGiven the conversation between the Human and Assistant, along with the follow-up message from the Human, and the provided prerequisite questions and relevant knowledge, refine the Human’s follow-up message into a standalone, detailed question.\n\nInstructions:\n1. Focus on the latest query from the Human, ensuring it is given the most weight.\n2. Incorporate Key Information:\n  - Use the prerequisite questions and their relevant knowledge to add specific details to the follow-up question.\n  - Replace ambiguous terms or references in the follow-up question with precise information from the provided knowledge. Example: Replace “latest version” with the actual version number mentioned in the knowledge.\n3. Utilize Conversation Context:\n  - Incorporate relevant context and background information from the conversation history to enhance the question's specificity.\n4. Optimize for Retrieval:\n  - Ensure the refined question emphasizes specific and relevant terms to maximize the effectiveness of a vector search for retrieving precise and comprehensive information.\n5. Grounded and Factual:\n  - Make sure the refined question is grounded in and directly based on the user's follow-up question and the provided knowledge.\n  - Do not introduce information that is not supported by the knowledge or conversation history.\n6. 
Give the language hint for the answer:\n  - Add a hint after the question like \"(Answer language: English)\", or \"(Answer language: Chinese)\", etc.\n  - This language hint should be exactly same with the language of the original question.\n  - If the original question has part of other language aside from English, please use the language of another language rather than English. Example: \"tidb tableread慢会是哪些原因\", it should be Chinese.\n\nExample:\n\nChat History:\n\nHuman: \"I'm interested in the performance improvements in the latest version of TiDB.\"\nAssistant: \"TiDB version 8.1 was released recently with significant performance enhancements over version 6.5.\"\n\nFollow-up Question:\n\n\"Can you tell me more about these improvements?\"\n\nPrerequisite Questions and Relevant Knowledge:\n\n- Prerequisite Question: What is the latest version of TiDB?\n- Relevant Knowledge: The latest version of TiDB is 8.1.\n\n...\n\nRefined Standalone Question:\n\n\"Can you provide detailed information about the performance improvements introduced in TiDB version 8.1 compared to version 6.5? (Answer language: English)\"\n\nYour Turn:\n\nChat history:\n\n{{chat_history}}\n\n---------------------\n\nFollowup question:\n\n{{question}}\n\n---------------------\n\nRefined standalone question:\n\"\"\"\n\nDEFAULT_TEXT_QA_PROMPT = \"\"\"\\\nCurrent Date: {{current_date}}\n---------------------\nKnowledge graph information is below\n---------------------\n\n{{graph_knowledges}}\n\n---------------------\nContext information is below.\n---------------------\n\n{{context_str}}\n\n---------------------\n\nAnswer Format:\n\nUse markdown footnote syntax (for example: [^1]) to indicate sources you used.\nEach footnote must correspond to a unique source. 
Do not use the same source for multiple footnotes.\n\n### Examples of Correct Footnote Usage (note the unique sources and diverse sources):\n[^1]: [TiDB Overview | PingCAP Docs](https://docs.pingcap.com/tidb/stable/overview)\n[^2]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/stable/architecture)\n\n### Examples of Incorrect Footnote Usage (Avoid duplicating the same source for multiple footnotes):\n[^1]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\n[^2]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\n[^3]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/dev/overview)\n[^4]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/stable/overview)\n\n---------------------\n\nAnswer Language:\n\nFollow the language of the language hint after the Refined Question.\nIf the language hint is not provided, use the language that the original questions used.\n\n---------------------\n\nAs a customer support assistant, please do not fabricate any knowledge. If you cannot get knowledge from the context, please just directly state \"you do not know\", rather than constructing nonexistent and potentially fake information!!!\n\nFirst, analyze the provided context information without assuming prior knowledge. Identify all relevant aspects of knowledge contained within. Then, from various perspectives and angles, answer questions as thoroughly and comprehensively as possible to better address and resolve the user's issue.\n\nThe Original question is:\n\n{{original_question}}\n\nThe Refined Question used to search:\n\n{{query_str}}\n\nAnswer:\n\"\"\"\n\nDEFAULT_FURTHER_QUESTIONS_PROMPT = \"\"\"\\\nThe chat message content is:\n\n{{chat_message_content}}\n\n---------------------\nTask:\nBased on the provided chat message, generate 3–5 follow-up questions that are relevant to the content. 
Each question should explore the topic in greater detail, seek clarification, or introduce new angles for discussion.\n\nInstructions:\n1. Build upon the key information, themes, or insights within the provided chat message.\n2. Aim for variety in question type (clarifying, probing, or exploratory) to encourage a deeper conversation.\n3. Ensure each question logically follows from the context of the provided chat message.\n4. Keep questions concise yet insightful to maximize engagement.\n5. Use the same language with the chat message content.\n6. Each question should end with a question mark.\n7. Each question should be in a new line, DO NOT add any indexes or blank lines, just output the questions.\n\nNow, generate 3–5 follow-up questions below:\n\"\"\"\n\nDEFAULT_GENERATE_GOAL_PROMPT = \"\"\"\\\nGiven the conversation history between the User and Assistant, along with the latest follow-up question from the User, perform the following tasks:\n\n1. **Language Detection**:\n    - Analyze the User's follow-up question to determine the language used.\n\n2. **Context Classification**:\n    - **Determine Relevance to TiDB**:\n        - Assess whether the follow-up question is related to TiDB products, support, or any TiDB-related context.\n    - **Set Background Accordingly**:\n        - **If Related to TiDB**:\n            - Set the background to encompass the relevant TiDB context. This may include aspects like TiDB features, configurations, best practices, troubleshooting, or general consulting related to TiDB.\n            - Example backgrounds:\n                - \"TiDB product configuration and optimization.\"\n                - \"TiDB troubleshooting and support.\"\n                - \"TiDB feature consultation.\"\n        - **If Unrelated to TiDB**:\n            - Set the background to \"Other topics.\"\n\n3. 
**Goal Generation**:\n    - **Clarify Intent to Avoid Ambiguity**:\n        - **Instructional Guidance**:\n            - If the User's question seeks guidance or a method (e.g., starts with \"How to\"), ensure the goal reflects a request for a step-by-step guide or best practices.\n        - **Information Retrieval**:\n            - If the User's question seeks specific information or confirmation (e.g., starts with \"Can you\" or \"Is it possible\"), rephrase it to focus on providing the requested information or verification without implying that the assistant should perform any actions.\n            - **Important**: Do not interpret these questions as requests for the assistant to execute operations. Instead, understand whether the user seeks to confirm certain information or requires a proposed solution, and restrict responses to information retrieval and guidance based on available documentation.\n    - **Reformulate the Latest User Follow-up Question**:\n        - Ensure the question is clear, directive, and suitable for a Q&A format.\n    - **Specify Additional Details**:\n        - **Detected Language**: Clearly indicate the language.\n        - **Desired Answer Format**: Specify if the answer should be in text, table, code snippet, etc.\n        - **Additional Requirements**: Include any other necessary instructions to tailor the response appropriately.\n\n4. **Output**:\n    - Produce a goal string in the following format:\n      \"[Refined Question] (Lang: [Detected Language], Format: [Format], Background: [Specified Goal Scenario])\"\n\n**Examples**:\n\n**Example 1**:\n\nChat history:\n\n[]\n\nFollow-up question:\n\n\"tidb encryption at rest 会影响数据压缩比例吗？\"\n\nGoal:\n\nDoes encryption at rest in TiDB affect the data compression ratio? (Lang: Chinese, Format: text, Background: TiDB product related consulting.)\n\n---------------------\n\n**Example 2**:\n\nChat history:\n\n[]\n\nFollow-up question:\n\n\"干嘛的？\"\n\nGoal:\n\nWhat can you do? 
(Lang: Chinese, Format: text, Background: General inquiry about the assistant's capabilities.)\n\n---------------------\n\n**Example 3**:\n\nChat history:\n\n[]\n\nFollow-up question:\n\n\"oracle 怎么样？\"\n\nGoal:\n\nHow is Oracle? (Lang: Chinese, Format: text, Background: Other topics.)\n\n---------------------\n\n**Example 4**:\n\nChat history:\n\n[]\n\nFollow-up question:\n\n\"Why is TiDB Serverless up to 70% cheaper than MySQL RDS? (use a table if possible)\"\n\nGoal:\n\nWhy is TiDB Serverless up to 70% cheaper than MySQL RDS? Please provide a comparison in a table format if possible. (Lang: English, Format: table, Background: Cost comparison between TiDB Serverless and MySQL RDS.)\n\n---------------------\n\n**Example 5 (Enhanced for Clarity and Guidance)**:\n\nChat history:\n\n[]\n\nFollow-up question:\n\n\"能否找到 tidb 中哪些视图的定义中包含已经被删除的表？\"\n\nGoal:\n\nHow to find which views in TiDB have definitions that include tables that have been deleted? (Lang: Chinese, Format: text, Background: TiDB product related consulting.)\n\n---------------------\n\n**Your Task**:\n\nChat history:\n\n{{chat_history}}\n\nFollow-up question:\n\n{{question}}\n\nGoal:\n\"\"\"\n"
  },
  {
    "path": "backend/app/rag/embeddings/local/local_embedding.py",
    "content": "\"\"\"Jina embeddings file.\"\"\"\n\nfrom typing import Any, List, Optional\nimport requests\n\nfrom llama_index.core.base.embeddings.base import DEFAULT_EMBED_BATCH_SIZE\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\nfrom llama_index.core.callbacks.base import CallbackManager\nfrom llama_index.core.embeddings import BaseEmbedding\n\nMAX_BATCH_SIZE = 500\n\n\nclass _APICaller:\n    def __init__(\n        self,\n        model: str,\n        api_url: str,\n        normalize_embeddings: bool = True,\n        **kwargs: Any,\n    ) -> None:\n        self.api_url = api_url\n        self.model = model\n        self.normalize_embeddings = normalize_embeddings\n        self._session = requests.Session()\n\n    def get_embeddings(self, sentences: list[str]) -> List[List[float]]:\n        \"\"\"Get embeddings.\"\"\"\n        # Call Jina AI Embedding API\n        resp = self._session.post(  # type: ignore\n            self.api_url,\n            json={\n                \"sentences\": sentences,\n                \"model\": self.model,\n                \"normalize_embeddings\": self.normalize_embeddings,\n            },\n        )\n        resp.raise_for_status()\n        resp_json = resp.json()\n        if \"embeddings\" not in resp_json:\n            raise RuntimeError(\n                f\"Call local embedding api {self.api_url} failed {resp.status_code}\"\n            )\n\n        return resp_json[\"embeddings\"]\n\n    async def aget_embeddings(self, sentences: list[str]) -> List[List[float]]:\n        \"\"\"Asynchronously get text embeddings.\"\"\"\n        import aiohttp\n\n        async with aiohttp.ClientSession(trust_env=True) as session:\n            async with session.post(\n                self.api_url,\n                json={\n                    \"sentences\": sentences,\n                    \"model\": self.model,\n                },\n            ) as response:\n                resp = await response.json()\n                
response.raise_for_status()\n                return resp[\"embeddings\"]\n\n\nclass LocalEmbedding(BaseEmbedding):\n    model: str = Field(\n        default=\"BAAI/bge-m3\",\n        description=\"The model to use when calling Jina AI API\",\n    )\n\n    _encoding_queries: str = PrivateAttr()\n    _encoding_documents: str = PrivateAttr()\n    _api: Any = PrivateAttr()\n\n    def __init__(\n        self,\n        model: str = \"BAAI/bge-m3\",\n        api_url: str = \"http://127.0.0.1:5001/api/v1/embedding\",\n        normalize_embeddings: bool = True,\n        embed_batch_size: int = DEFAULT_EMBED_BATCH_SIZE,\n        callback_manager: Optional[CallbackManager] = None,\n        **kwargs: Any,\n    ) -> None:\n        super().__init__(\n            embed_batch_size=embed_batch_size,\n            callback_manager=callback_manager,\n            model=model,\n            **kwargs,\n        )\n        self._api: _APICaller = _APICaller(\n            model=model, api_url=api_url, normalize_embeddings=normalize_embeddings\n        )\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"LocalEmbedding\"\n\n    def _get_query_embedding(self, query: str) -> List[float]:\n        \"\"\"Get query embedding.\"\"\"\n        return self._api.get_embeddings([query])[0]\n\n    async def _aget_query_embedding(self, query: str) -> List[float]:\n        \"\"\"The asynchronous version of _get_query_embedding.\"\"\"\n        result = await self._api.aget_embeddings([query])\n        return result[0]\n\n    def _get_text_embedding(self, text: str) -> List[float]:\n        \"\"\"Get text embedding.\"\"\"\n        return self._get_text_embeddings([text])[0]\n\n    async def _aget_text_embedding(self, text: str) -> List[float]:\n        \"\"\"Asynchronously get text embedding.\"\"\"\n        result = await self._aget_text_embeddings([text])\n        return result[0]\n\n    def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:\n        return 
self._api.get_embeddings(texts)\n\n    async def _aget_text_embeddings(\n        self,\n        texts: List[str],\n    ) -> List[List[float]]:\n        return await self._api.aget_embeddings(texts)\n"
  },
  {
    "path": "backend/app/rag/embeddings/open_like/openai_like_embedding.py",
    "content": "from typing import Any, List, Optional\n\nfrom llama_index.core.base.embeddings.base import DEFAULT_EMBED_BATCH_SIZE\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\nfrom llama_index.core.callbacks.base import CallbackManager\nfrom llama_index.core.embeddings import BaseEmbedding\nfrom openai import OpenAI, AsyncOpenAI\n\n\nclass OpenAILikeEmbedding(BaseEmbedding):\n    # We cannot directly call the llama-index's API because it limited the model name\n    # And the 'embedding-2' or 'embedding-3' is not one of the OpenAI's model name\n\n    model: str = Field(\n        default=\"embedding-3\",\n        description=\"The model to use when calling Zhipu AI API\",\n    )\n    _client: OpenAI = PrivateAttr()\n    _aclient: AsyncOpenAI = PrivateAttr()\n\n    def __init__(\n        self,\n        api_key: str,\n        model: str = \"embedding-3\",\n        api_base: str = \"https://open.bigmodel.cn/api/paas/v4/\",\n        embed_batch_size: int = DEFAULT_EMBED_BATCH_SIZE,\n        callback_manager: Optional[CallbackManager] = None,\n        **kwargs: Any,\n    ) -> None:\n        super().__init__(\n            embed_batch_size=embed_batch_size,\n            callback_manager=callback_manager,\n            model=model,\n            **kwargs,\n        )\n\n        self._model_kwargs = kwargs or {}\n        self.model = model\n        self._client = OpenAI(api_key=api_key, base_url=api_base)\n        self._aclient = AsyncOpenAI(api_key=api_key, base_url=api_base)\n\n    def get_embeddings(self, sentences: list[str]) -> List[List[float]]:\n        \"\"\"Get embeddings.\"\"\"\n        # Call Zhipu AI Embedding API via OpenAI client\n        embedding_objs = self._client.embeddings.create(\n            input=sentences, model=self.model, **self._model_kwargs\n        ).data\n        embeddings = [obj.embedding for obj in embedding_objs]\n\n        return embeddings\n\n    async def aget_embeddings(self, sentences: list[str]) -> 
List[List[float]]:\n        \"\"\"Asynchronously get text embeddings.\"\"\"\n        result = await self._aclient.embeddings.create(\n            input=sentences, model=self.model, **self._model_kwargs\n        )\n        embeddings = [obj.embedding for obj in result.data]\n\n        return embeddings\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"OpenAILikeEmbedding\"\n\n    def _get_query_embedding(self, query: str) -> List[float]:\n        \"\"\"Get query embedding.\"\"\"\n        return self.get_embeddings([query])[0]\n\n    async def _aget_query_embedding(self, query: str) -> List[float]:\n        \"\"\"The asynchronous version of _get_query_embedding.\"\"\"\n        result = await self.aget_embeddings([query])\n        return result[0]\n\n    def _get_text_embedding(self, text: str) -> List[float]:\n        \"\"\"Get text embedding.\"\"\"\n        return self._get_text_embeddings([text])[0]\n\n    async def _aget_text_embedding(self, text: str) -> List[float]:\n        \"\"\"Asynchronously get text embedding.\"\"\"\n        result = await self._aget_text_embeddings([text])\n        return result[0]\n\n    def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:\n        return self.get_embeddings(texts)\n\n    async def _aget_text_embeddings(\n        self,\n        texts: List[str],\n    ) -> List[List[float]]:\n        return await self.aget_embeddings(texts)\n"
  },
  {
    "path": "backend/app/rag/embeddings/provider.py",
    "content": "import enum\n\nfrom typing import List\nfrom pydantic import BaseModel\n\n\nclass EmbeddingProvider(str, enum.Enum):\n    OPENAI = \"openai\"\n    JINA = \"jina\"\n    COHERE = \"cohere\"\n    BEDROCK = \"bedrock\"\n    OLLAMA = \"ollama\"\n    GITEEAI = \"giteeai\"\n    LOCAL = \"local\"\n    OPENAI_LIKE = \"openai_like\"\n    AZURE_OPENAI = \"azure_openai\"\n\n\nclass EmbeddingProviderOption(BaseModel):\n    provider: EmbeddingProvider\n    provider_display_name: str | None = None\n    provider_description: str | None = None\n    provider_url: str | None = None\n    default_embedding_model: str\n    embedding_model_description: str\n    default_config: dict = {}\n    config_description: str = \"\"\n    default_credentials: str | dict = \"\"\n    credentials_display_name: str\n    credentials_description: str\n    credentials_type: str = \"str\"\n\n\nembedding_provider_options: List[EmbeddingProviderOption] = [\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.OPENAI,\n        provider_display_name=\"OpenAI\",\n        provider_description=\"The OpenAI API provides a simple interface for developers to create an intelligence layer in their applications, powered by OpenAI's state of the art models.\",\n        provider_url=\"https://platform.openai.com\",\n        default_embedding_model=\"text-embedding-3-small\",\n        embedding_model_description=\"Find more information about OpenAI Embedding at https://platform.openai.com/docs/guides/embeddings\",\n        credentials_display_name=\"OpenAI API Key\",\n        credentials_description=\"The API key of OpenAI, you can find it in https://platform.openai.com/api-keys\",\n        credentials_type=\"str\",\n        default_credentials=\"sk-****\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.JINA,\n        provider_display_name=\"JinaAI\",\n        provider_description=\"Jina AI provides multimodal, bilingual long-context embeddings for search and RAG\",\n  
      provider_url=\"https://jina.ai/embeddings/\",\n        default_embedding_model=\"jina-embeddings-v2-base-en\",\n        embedding_model_description=\"Find more information about Jina AI Embeddings at https://jina.ai/embeddings/\",\n        credentials_display_name=\"Jina API Key\",\n        credentials_description=\"The API key of Jina, you can find it in https://jina.ai/embeddings/\",\n        credentials_type=\"str\",\n        default_credentials=\"jina_****\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.COHERE,\n        provider_display_name=\"Cohere\",\n        provider_description=\"Cohere provides industry-leading large language models (LLMs) and RAG capabilities tailored to meet the needs of enterprise use cases that solve real-world problems.\",\n        provider_url=\"https://cohere.com/embeddings\",\n        default_embedding_model=\"embed-multilingual-v3.0\",\n        embedding_model_description=\"Documentation: https://docs.cohere.com/docs/cohere-embed\",\n        credentials_display_name=\"Cohere API Key\",\n        credentials_description=\"You can get one from https://dashboard.cohere.com/api-keys\",\n        credentials_type=\"str\",\n        default_credentials=\"*****\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.BEDROCK,\n        provider_display_name=\"Bedrock\",\n        provider_description=\"Amazon Bedrock is a fully managed foundation models service.\",\n        provider_url=\"https://docs.aws.amazon.com/bedrock/\",\n        default_embedding_model=\"amazon.titan-embed-text-v2:0\",\n        embedding_model_description=\"\",\n        credentials_display_name=\"AWS Bedrock Credentials JSON\",\n        credentials_description=\"The JSON Object of AWS Credentials, refer to https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global\",\n        credentials_type=\"dict\",\n        default_credentials={\n            \"aws_access_key_id\": 
\"****\",\n            \"aws_secret_access_key\": \"****\",\n            \"aws_region_name\": \"us-west-2\",\n        },\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.OLLAMA,\n        provider_display_name=\"Ollama\",\n        provider_description=\"Ollama is a lightweight framework for building and running large language models and embed models.\",\n        provider_url=\"https://ollama.com\",\n        default_embedding_model=\"nomic-embed-text\",\n        embedding_model_description=\"Documentation: https://ollama.com/blog/embedding-models\",\n        default_config={\n            \"api_base\": \"http://localhost:11434\",\n        },\n        config_description=\"api_base is the base URL of the Ollama server, ensure it can be accessed from this server.\",\n        credentials_display_name=\"Ollama API Key\",\n        credentials_description=\"Ollama doesn't require an API key, set a dummy string here is ok\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.OPENAI_LIKE,\n        provider_display_name=\"OpenAI Like\",\n        provider_description=\"OpenAI-Like is a set of platforms that provide text embeddings similar to OpenAI. Such as ZhiPuAI.\",\n        provider_url=\"https://open.bigmodel.cn/dev/api/vector/embedding-3\",\n        default_embedding_model=\"embedding-3\",\n        embedding_model_description=\"\",\n        credentials_display_name=\"OpenAI Like API Key\",\n        credentials_description=\"The API key of OpenAI Like. 
For ZhipuAI, you can find it in https://open.bigmodel.cn/usercenter/apikeys\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.GITEEAI,\n        provider_display_name=\"Gitee AI\",\n        provider_description=\"Gitee AI is a third-party model provider that offers ready-to-use cutting-edge model APIs for AI developers.\",\n        provider_url=\"https://ai.gitee.com\",\n        default_embedding_model=\"bge-large-zh-v1.5\",\n        embedding_model_description=\"Find more information about Gitee AI Embeddings at https://ai.gitee.com/docs/openapi/v1#tag/%E7%89%B9%E5%BE%81%E6%8A%BD%E5%8F%96/POST/embeddings\",\n        credentials_display_name=\"Gitee AI API Key\",\n        credentials_description=\"The API key of Gitee AI, you can find it in https://ai.gitee.com/dashboard/settings/tokens\",\n        credentials_type=\"str\",\n        default_credentials=\"****\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.AZURE_OPENAI,\n        provider_display_name=\"Azure OpenAI\",\n        provider_description=\"Azure OpenAI is a cloud-based AI service that provides a suite of AI models and tools for developers to build intelligent applications.\",\n        provider_url=\"https://azure.microsoft.com/en-us/products/ai-services/openai-service\",\n        default_embedding_model=\"text-embedding-3-small\",\n        embedding_model_description=\"Before using this option, you need to deploy an Azure OpenAI API and model, see https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource.\",\n        default_config={\n            \"azure_endpoint\": \"https://<your-resource-name>.openai.azure.com/\",\n            \"api_version\": \"<your-api-version>\",\n        },\n        credentials_display_name=\"Azure OpenAI API Key\",\n        credentials_description=\"The API key of Azure OpenAI\",\n        credentials_type=\"str\",\n        
default_credentials=\"****\",\n    ),\n    EmbeddingProviderOption(\n        provider=EmbeddingProvider.LOCAL,\n        provider_display_name=\"Local Embedding\",\n        provider_description=\"Autoflow's local embedding server, deployed on your own infrastructure and powered by sentence-transformers.\",\n        default_embedding_model=\"BAAI/bge-m3\",\n        embedding_model_description=\"Find more models in huggingface.\",\n        default_config={\n            \"api_url\": \"http://local-embedding-reranker:5001/api/v1/embedding\",\n        },\n        config_description=\"api_url is the embedding endpoint url serviced by the autoflow local embedding server.\",\n        credentials_display_name=\"Local Embedding API Key\",\n        credentials_description=\"Local Embedding server doesn't require an API key, set a dummy string here is ok.\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n]\n"
  },
  {
    "path": "backend/app/rag/embeddings/resolver.py",
    "content": "from typing import Optional\n\nfrom llama_index.embeddings.azure_openai import AzureOpenAIEmbedding\nfrom sqlmodel import Session\n\nfrom llama_index.core.base.embeddings.base import BaseEmbedding\nfrom llama_index.embeddings.openai import OpenAIEmbedding\nfrom llama_index.embeddings.jinaai import JinaEmbedding\nfrom llama_index.embeddings.cohere import CohereEmbedding\nfrom llama_index.embeddings.bedrock import BedrockEmbedding\nfrom llama_index.embeddings.ollama import OllamaEmbedding\n\nfrom app.rag.embeddings.open_like.openai_like_embedding import OpenAILikeEmbedding\nfrom app.rag.embeddings.local.local_embedding import LocalEmbedding\n\nfrom app.repositories.embedding_model import embedding_model_repo\nfrom app.rag.embeddings.provider import EmbeddingProvider\n\n\ndef resolve_embed_model(\n    provider: EmbeddingProvider,\n    model: str,\n    config: dict,\n    credentials: str | list | dict | None,\n) -> BaseEmbedding:\n    match provider:\n        case EmbeddingProvider.OPENAI:\n            return OpenAIEmbedding(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case EmbeddingProvider.JINA:\n            return JinaEmbedding(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case EmbeddingProvider.COHERE:\n            return CohereEmbedding(\n                model_name=model,\n                cohere_api_key=credentials,\n                **config,\n            )\n        case EmbeddingProvider.BEDROCK:\n            return BedrockEmbedding(\n                model_name=model,\n                aws_access_key_id=credentials[\"aws_access_key_id\"],\n                aws_secret_access_key=credentials[\"aws_secret_access_key\"],\n                region_name=credentials[\"aws_region_name\"],\n                **config,\n            )\n        case EmbeddingProvider.OLLAMA:\n            return OllamaEmbedding(\n       
         model_name=model,\n                **config,\n            )\n        case EmbeddingProvider.LOCAL:\n            return LocalEmbedding(\n                model=model,\n                **config,\n            )\n        case EmbeddingProvider.GITEEAI:\n            return OpenAILikeEmbedding(\n                model=model,\n                api_base=\"https://ai.gitee.com/v1\",\n                api_key=credentials,\n                **config,\n            )\n        case EmbeddingProvider.AZURE_OPENAI:\n            return AzureOpenAIEmbedding(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case EmbeddingProvider.OPENAI_LIKE:\n            return OpenAILikeEmbedding(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case _:\n            raise ValueError(f\"Got unknown embedding provider: {provider}\")\n\n\ndef get_default_embed_model(session: Session) -> Optional[BaseEmbedding]:\n    db_embed_model = embedding_model_repo.get_default(session)\n    if not db_embed_model:\n        return None\n    return resolve_embed_model(\n        db_embed_model.provider,\n        db_embed_model.model,\n        db_embed_model.config,\n        db_embed_model.credentials,\n    )\n\n\ndef must_get_default_embed_model(session: Session) -> BaseEmbedding:\n    db_embed_model = embedding_model_repo.must_get_default(session)\n    return resolve_embed_model(\n        db_embed_model.provider,\n        db_embed_model.model,\n        db_embed_model.config,\n        db_embed_model.credentials,\n    )\n"
  },
  {
    "path": "backend/app/rag/indices/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/__init__.py",
    "content": "from .base import KnowledgeGraphIndex\n\n__all__ = [\"KnowledgeGraphIndex\"]\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/base.py",
    "content": "import dspy\nimport logging\n\nfrom typing import Any, Dict, List, Optional, Sequence\nfrom llama_index.core.data_structs import IndexLPG\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.indices.base import BaseIndex\nfrom llama_index.core.storage.docstore.types import RefDocInfo\nfrom llama_index.core.storage.storage_context import StorageContext\nfrom llama_index.core.schema import BaseNode, TransformComponent\nimport llama_index.core.instrumentation as instrument\nfrom app.rag.indices.knowledge_graph.extractor import SimpleGraphExtractor\nfrom app.rag.indices.knowledge_graph.graph_store import KnowledgeGraphStore\n\n\nlogger = logging.getLogger(__name__)\n\ndispatcher = instrument.get_dispatcher(__name__)\n\n\nclass KnowledgeGraphIndex(BaseIndex[IndexLPG]):\n    \"\"\"An index for a property graph.\n\n    Args:\n        nodes (Optional[Sequence[BaseNode]]):\n            A list of nodes to insert into the index.\n        dspy_lm (dspy.BaseLLM):\n            The language model of dspy to use for extracting triplets.\n        callback_manager (Optional[CallbackManager]):\n            The callback manager to use.\n        transformations (Optional[List[TransformComponent]]):\n            A list of transformations to apply to the nodes before inserting them into the index.\n            These are applied prior to the `kg_extractors`.\n        storage_context (Optional[StorageContext]):\n            The storage context to use.\n        show_progress (bool):\n            Whether to show progress bars for transformations. 
Defaults to `False`.\n    \"\"\"\n\n    index_struct_cls = IndexLPG\n\n    def __init__(\n        self,\n        dspy_lm: dspy.LM,\n        kg_store: KnowledgeGraphStore,\n        nodes: Optional[Sequence[BaseNode]] = None,\n        # parent class params\n        callback_manager: Optional[CallbackManager] = None,\n        **kwargs: Any,\n    ) -> None:\n        self._dspy_lm = dspy_lm\n        self._kg_store = kg_store\n        super().__init__(\n            nodes=nodes,\n            callback_manager=callback_manager,\n            **kwargs,\n        )\n\n    @classmethod\n    def from_existing(\n        cls: \"KnowledgeGraphIndex\",\n        dspy_lm: dspy.LM,\n        kg_store: KnowledgeGraphStore,\n        # parent class params\n        callback_manager: Optional[CallbackManager] = None,\n        transformations: Optional[List[TransformComponent]] = None,\n        storage_context: Optional[StorageContext] = None,\n        show_progress: bool = False,\n        **kwargs: Any,\n    ) -> \"KnowledgeGraphIndex\":\n        return cls(\n            dspy_lm=dspy_lm,\n            kg_store=kg_store,\n            nodes=[],  # no nodes to insert\n            callback_manager=callback_manager,\n            transformations=transformations,\n            storage_context=storage_context,\n            show_progress=show_progress,\n            **kwargs,\n        )\n\n    def _insert_nodes(self, nodes: Sequence[BaseNode]):\n        \"\"\"Insert nodes to the index struct.\"\"\"\n        if len(nodes) == 0:\n            return nodes\n\n        extractor = SimpleGraphExtractor(dspy_lm=self._dspy_lm)\n        for node in nodes:\n            entities_df, rel_df = extractor.extract(\n                text=node.get_content(),\n                node=node,\n            )\n            self._kg_store.save(node.node_id, entities_df, rel_df)\n\n    def _build_index_from_nodes(self, nodes: Optional[Sequence[BaseNode]]) -> IndexLPG:\n        \"\"\"Build index from nodes.\"\"\"\n        nodes = 
self._insert_nodes(nodes or [])\n        return IndexLPG()\n\n    def as_retriever(self, **kwargs: Any):\n        \"\"\"Return a retriever for the index.\"\"\"\n        # Our retriever params is more complex than the base retriever,\n        # so we can't use the base retriever.\n        raise NotImplementedError(\n            \"Retriever not implemented for KnowledgeGraphIndex, use `retrieve_with_weight` instead.\"\n        )\n\n    def _insert(self, nodes: Sequence[BaseNode], **insert_kwargs: Any) -> None:\n        \"\"\"Index-specific logic for inserting nodes to the index struct.\"\"\"\n        self._insert_nodes(nodes)\n\n    def ref_doc_info(self) -> Dict[str, RefDocInfo]:\n        \"\"\"Retrieve a dict mapping of ingested documents and their nodes+metadata.\"\"\"\n        raise NotImplementedError(\n            \"Ref doc info not implemented for KnowledgeGraphIndex. \"\n            \"All inserts are already upserts.\"\n        )\n\n    def _delete_node(self, node_id: str, **delete_kwargs: Any) -> None:\n        \"\"\"Delete a node.\"\"\"\n        raise NotImplementedError(\n            \"Delete node not implemented for KnowledgeGraphIndex.\"\n        )\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/extractor.py",
    "content": "import logging\nfrom copy import deepcopy\nimport pandas as pd\nimport dspy\nfrom typing import Mapping, Optional, List\n\nfrom dspy import Predict\nfrom llama_index.core.schema import BaseNode\n\nfrom app.rag.indices.knowledge_graph.schema import (\n    Entity,\n    Relationship,\n    KnowledgeGraph,\n    EntityCovariateInput,\n    EntityCovariateOutput,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass ExtractGraphTriplet(dspy.Signature):\n    \"\"\"Carefully analyze the provided text from database documentation and community blogs to thoroughly identify all entities related to database technologies, including both general concepts and specific details.\n\n    Follow these Step-by-Step Analysis:\n\n    1. Extract Meaningful Entities:\n      - Identify all significant nouns, proper nouns, and technical terminologies that represent database-related concepts, objects, components, features, issues, key steps, execute order, user case, locations, versions, or any substantial entities.\n      - Ensure that you capture entities across different levels of detail, from high-level overviews to specific technical specifications, to create a comprehensive representation of the subject matter.\n      - Choose names for entities that are specific enough to indicate their meaning without additional context, avoiding overly generic terms.\n      - Consolidate similar entities to avoid redundancy, ensuring each represents a distinct concept at appropriate granularity levels.\n\n    2. 
Extract Metadata to claim the entities:\n      - Carefully review the provided text, focusing on identifying detailed covariates associated with each entity.\n      - Extract and link the covariates (which is a comprehensive json TREE, the first field is always: \"topic\") to their respective entities.\n      - Ensure all extracted covariates is clearly connected to the correct entity for accuracy and comprehensive understanding.\n      - Ensure that all extracted covariates are factual and verifiable within the text itself, without relying on external knowledge or assumptions.\n      - Collectively, the covariates should provide a thorough and precise summary of the entity's characteristics as described in the source material.\n\n    3. Establish Relationships:\n      - Carefully examine the text to identify all relationships between clearly-related entities, ensuring each relationship is correctly captured with accurate details about the interactions.\n      - Analyze the context and interactions between the identified entities to determine how they are interconnected, focusing on actions, associations, dependencies, or similarities.\n      - Clearly define the relationships, ensuring accurate directionality that reflects the logical or functional dependencies among entities. 
\\\n         This means identifying which entity is the source, which is the target, and what the nature of their relationship is (e.g., $source_entity depends on $target_entity for $relationship).\n\n    Some key points to consider:\n      - Please endeavor to extract all meaningful entities and relationships from the text, avoid subsequent additional gleanings.\n\n    Objective: Produce a detailed and comprehensive knowledge graph that captures the full spectrum of entities mentioned in the text, along with their interrelations, reflecting both broad concepts and intricate details specific to the database domain.\n\n    Please only response in JSON format.\n    \"\"\"\n\n    text = dspy.InputField(\n        desc=\"a paragraph of text to extract entities and relationships to form a knowledge graph\"\n    )\n    knowledge: KnowledgeGraph = dspy.OutputField(\n        desc=\"Graph representation of the knowledge extracted from the text.\"\n    )\n\n\nclass ExtractCovariate(dspy.Signature):\n    \"\"\"Please carefully review the provided text and entities list which are already identified in the text. 
Focusing on identifying detailed covariates associated with each entities provided.\n    Extract and link the covariates (which is a comprehensive json TREE, the first field is always: \"topic\") to their respective entities.\n    Ensure all extracted covariates is clearly connected to the correct entity for accuracy and comprehensive understanding.\n    Ensure that all extracted covariates are factual and verifiable within the text itself, without relying on external knowledge or assumptions.\n    Collectively, the covariates should provide a thorough and precise summary of the entity's characteristics as described in the source material.\n\n    Please only response in JSON format.\n    \"\"\"\n\n    text = dspy.InputField(\n        desc=\"a paragraph of text to extract covariates to claim the entities.\"\n    )\n\n    entities: List[EntityCovariateInput] = dspy.InputField(\n        desc=\"List of entities identified in the text.\"\n    )\n    covariates: List[EntityCovariateOutput] = dspy.OutputField(\n        desc=\"Graph representation of the knowledge extracted from the text.\"\n    )\n\n\ndef get_relation_metadata_from_node(node: BaseNode):\n    metadata = deepcopy(node.metadata)\n    for key in [\n        \"_node_content\",\n        \"_node_type\",\n        \"excerpt_keywords\",\n        \"questions_this_excerpt_can_answer\",\n        \"section_summary\",\n    ]:\n        metadata.pop(key, None)\n    metadata[\"chunk_id\"] = node.node_id\n    return metadata\n\n\nclass Extractor(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.prog_graph = Predict(ExtractGraphTriplet)\n        self.prog_covariates = Predict(ExtractCovariate)\n\n    def forward(self, text):\n        with dspy.settings.context(lm=self.dspy_lm):\n            pred_graph = self.prog_graph(text=text)\n\n            # extract the covariates\n            entities_for_covariates = [\n                
EntityCovariateInput(\n                    name=entity.name,\n                    description=entity.description,\n                )\n                for entity in pred_graph.knowledge.entities\n            ]\n\n            pred_covariates = self.prog_covariates(\n                text=text,\n                entities=entities_for_covariates,\n            )\n\n            # replace the entities with the covariates\n            for entity in pred_graph.knowledge.entities:\n                for covariate in pred_covariates.covariates:\n                    if entity.name == covariate.name:\n                        entity.metadata = covariate.covariates\n\n            return pred_graph\n\n\nclass SimpleGraphExtractor:\n    def __init__(\n        self, dspy_lm: dspy.LM, complied_extract_program_path: Optional[str] = None\n    ):\n        self.extract_prog = Extractor(dspy_lm=dspy_lm)\n        if complied_extract_program_path is not None:\n            self.extract_prog.load(complied_extract_program_path)\n\n    def extract(self, text: str, node: BaseNode):\n        pred = self.extract_prog(text=text)\n        metadata = get_relation_metadata_from_node(node)\n\n        # Ensure all entities have proper metadata dictionary structure\n        for entity in pred.knowledge.entities:\n            if entity.metadata is None or not isinstance(entity.metadata, dict):\n                entity.metadata = {\"topic\": \"Unknown\", \"status\": \"auto-generated\"}\n\n        return self._to_df(\n            pred.knowledge.entities, pred.knowledge.relationships, metadata\n        )\n\n    def _to_df(\n        self,\n        entities: list[Entity],\n        relationships: list[Relationship],\n        extra_meta: Mapping[str, str],\n    ):\n        # Create lists to store dictionaries for entities and relationships\n        entities_data = []\n        relationships_data = []\n\n        # Iterate over parsed entities and relationships to create dictionaries\n        for entity in entities:\n   
         entity_dict = {\n                \"name\": entity.name,\n                \"description\": entity.description,\n                \"meta\": entity.metadata,\n            }\n            entities_data.append(entity_dict)\n\n        mapped_entities = {entity[\"name\"]: entity for entity in entities_data}\n\n        for relationship in relationships:\n            source_entity_description = \"\"\n            if relationship.source_entity not in mapped_entities:\n                new_source_entity = {\n                    \"name\": relationship.source_entity,\n                    \"description\": (\n                        f\"Derived from relationship: \"\n                        f\"{relationship.source_entity} -> {relationship.relationship_desc} -> {relationship.target_entity}\"\n                    ),\n                    \"meta\": {\"status\": \"need-revised\"},\n                }\n                entities_data.append(new_source_entity)\n                mapped_entities[relationship.source_entity] = new_source_entity\n                source_entity_description = new_source_entity[\"description\"]\n            else:\n                source_entity_description = mapped_entities[relationship.source_entity][\n                    \"description\"\n                ]\n\n            target_entity_description = \"\"\n            if relationship.target_entity not in mapped_entities:\n                new_target_entity = {\n                    \"name\": relationship.target_entity,\n                    \"description\": (\n                        f\"Derived from relationship: \"\n                        f\"{relationship.source_entity} -> {relationship.relationship_desc} -> {relationship.target_entity}\"\n                    ),\n                    \"meta\": {\"status\": \"need-revised\"},\n                }\n                entities_data.append(new_target_entity)\n                mapped_entities[relationship.target_entity] = new_target_entity\n
                target_entity_description = new_target_entity[\"description\"]\n            else:\n                target_entity_description = mapped_entities[relationship.target_entity][\n                    \"description\"\n                ]\n\n            relationship_dict = {\n                \"source_entity\": relationship.source_entity,\n                \"source_entity_description\": source_entity_description,\n                \"target_entity\": relationship.target_entity,\n                \"target_entity_description\": target_entity_description,\n                \"relationship_desc\": relationship.relationship_desc,\n                \"meta\": {\n                    **extra_meta,\n                },\n            }\n            relationships_data.append(relationship_dict)\n\n        # Create DataFrames for entities and relationships\n        entities_df = pd.DataFrame(entities_data)\n        relationships_df = pd.DataFrame(relationships_data)\n        return entities_df, relationships_df\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/graph_store/__init__.py",
    "content": "from .tidb_graph_store import TiDBGraphStore\nfrom .tidb_graph_editor import TiDBGraphEditor\nfrom .tidb_graph_store import KnowledgeGraphStore\n\n__all__ = [\n    \"TiDBGraphStore\",\n    \"TiDBGraphEditor\",\n    \"KnowledgeGraphStore\",\n]\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/graph_store/helpers.py",
    "content": "import json\nfrom typing import List, Tuple, Mapping, Any\n\nfrom llama_index.embeddings.openai import OpenAIEmbedding, OpenAIEmbeddingModelType\nfrom llama_index.core.base.embeddings.base import BaseEmbedding, Embedding\n\n# The configuration for the weight coefficient\n# format: ((min_weight, max_weight), coefficient)\nDEFAULT_WEIGHT_COEFFICIENT_CONFIG = [\n    ((0, 100), 0.01),\n    ((100, 1000), 0.001),\n    ((1000, 10000), 0.0001),\n    ((10000, float(\"inf\")), 0.00001),\n]\n\n# The configuration for the range search\n# format: ((min_distance, max_distance), seach_ratio)\n# The sum of search ratio should be 1 except some case we want to search as many as possible relationships.\n# In this case, we set the search ratio to 1, and the other search ratio sum should be 1\nDEFAULT_RANGE_SEARCH_CONFIG = [\n    ((0.0, 0.25), 1),\n    ((0.25, 0.35), 0.7),\n    ((0.35, 0.45), 0.2),\n    ((0.45, 0.55), 0.1),\n]\n\nDEFAULT_DEGREE_COEFFICIENT = 0.001\n\n\ndef get_weight_score(\n    weight: int, weight_coefficient_config: List[Tuple[Tuple[int, int], float]]\n) -> float:\n    weight_score = 0.0\n    remaining_weight = weight\n\n    for weight_range, coefficient in weight_coefficient_config:\n        if remaining_weight <= 0:\n            break\n        lower_bound, upper_bound = weight_range\n        applicable_weight = min(upper_bound - lower_bound, remaining_weight)\n        weight_score += applicable_weight * coefficient\n        remaining_weight -= applicable_weight\n\n    return weight_score\n\n\ndef get_degree_score(in_degree: int, out_degree: int, degree_coefficient) -> float:\n    return (in_degree - out_degree) * degree_coefficient\n\n\ndef calculate_relationship_score(\n    embedding_distance: float,\n    weight: int,\n    in_degree: int,\n    out_degree: int,\n    alpha: float,\n    weight_coefficient_config: List[\n        Tuple[Tuple[int, int], float]\n    ] = DEFAULT_WEIGHT_COEFFICIENT_CONFIG,\n    degree_coefficient: float = 
DEFAULT_DEGREE_COEFFICIENT,\n    with_degree: bool = False,\n) -> float:\n    weighted_score = get_weight_score(weight, weight_coefficient_config)\n    degree_score = 0\n    if with_degree:\n        degree_score = get_degree_score(in_degree, out_degree, degree_coefficient)\n    return alpha * (1 / embedding_distance) + weighted_score + degree_score\n\n\ndef get_default_embed_model() -> BaseEmbedding:\n    return OpenAIEmbedding(model=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL)\n\n\ndef get_query_embedding(query: str, embed_model: BaseEmbedding = None) -> Embedding:\n    if not embed_model:\n        embed_model = get_default_embed_model()\n    return embed_model.get_query_embedding(query)\n\n\ndef get_text_embedding(text: str, embed_model: BaseEmbedding = None) -> Embedding:\n    if not embed_model:\n        embed_model = get_default_embed_model()\n    return embed_model.get_text_embedding(text)\n\n\ndef get_entity_description_embedding(\n    name: str, description: str, embed_model: BaseEmbedding = None\n) -> Embedding:\n    combined_text = f\"{name}: {description}\"\n    return get_text_embedding(combined_text, embed_model)\n\n\ndef get_entity_metadata_embedding(\n    metadata: Mapping[str, Any], embed_model: BaseEmbedding = None\n) -> Embedding:\n    combined_text = json.dumps(metadata, ensure_ascii=False)\n    return get_text_embedding(combined_text, embed_model)\n\n\ndef get_relationship_description_embedding(\n    source_entity_name: str,\n    source_entity_description,\n    target_entity_name: str,\n    target_entity_description: str,\n    relationship_desc: str,\n    embed_model: BaseEmbedding = None,\n):\n    combined_text = (\n        f\"{source_entity_name}({source_entity_description}) -> \"\n        f\"{relationship_desc} -> {target_entity_name}({target_entity_description}) \"\n    )\n    return get_text_embedding(combined_text, embed_model)\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/graph_store/schema.py",
    "content": "from abc import ABC, abstractmethod\nfrom typing import Dict, Optional, Tuple\n\nfrom sqlmodel import Session\n\n\nclass KnowledgeGraphStore(ABC):\n    @abstractmethod\n    def save(self, entities_df, relationships_df) -> None:\n        \"\"\"Upsert entities and relationships to the graph store.\"\"\"\n        pass\n\n    @abstractmethod\n    def retrieve_with_weight(\n        self,\n        query: str,\n        embedding: list,\n        depth: int = 2,\n        include_meta: bool = False,\n        with_degree: bool = False,\n        relationship_meta_filters: Dict = {},\n        session: Optional[Session] = None,\n    ) -> Tuple[list, list, list]:\n        \"\"\"Retrieve nodes and relationships with weights.\"\"\"\n        pass\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_editor.py",
    "content": "from typing import Optional, Tuple, List, Type\n\nfrom llama_index.core.embeddings import resolve_embed_model\nfrom llama_index.core.embeddings.utils import EmbedType\nfrom llama_index.embeddings.openai import OpenAIEmbedding, OpenAIEmbeddingModelType\nfrom sqlmodel import Session, select, SQLModel\nfrom sqlalchemy.orm import joinedload\nfrom sqlalchemy.orm.attributes import flag_modified\nfrom app.models import EntityType\nfrom app.rag.indices.knowledge_graph.schema import Relationship as RelationshipAIModel\nfrom app.rag.indices.knowledge_graph.graph_store import TiDBGraphStore\nfrom app.rag.indices.knowledge_graph.graph_store.helpers import (\n    get_entity_description_embedding,\n    get_relationship_description_embedding,\n    get_entity_metadata_embedding,\n    get_query_embedding,\n)\nfrom app.staff_action import create_staff_action_log\n\n\n# TODO: CRUD operations should move to TiDBGraphStore\nclass TiDBGraphEditor:\n    _entity_db_model: Type[SQLModel]\n    _relationship_db_model: Type[SQLModel]\n\n    def __init__(\n        self,\n        knowledge_base_id: int,\n        entity_db_model: Type[SQLModel],\n        relationship_db_model: Type[SQLModel],\n        embed_model: Optional[EmbedType] = None,\n    ):\n        self.knowledge_base_id = knowledge_base_id\n        self._entity_db_model = entity_db_model\n        self._relationship_db_model = relationship_db_model\n\n        if embed_model:\n            self._embed_model = resolve_embed_model(embed_model)\n        else:\n            self._embed_model = OpenAIEmbedding(\n                model=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL\n            )\n\n    def get_entity(self, session: Session, entity_id: int) -> Optional[SQLModel]:\n        return session.get(self._entity_db_model, entity_id)\n\n    def update_entity(\n        self, session: Session, entity: SQLModel, new_entity: dict\n    ) -> SQLModel:\n        old_entity_dict = entity.screenshot()\n        for key, value in 
new_entity.items():\n            if value is not None:\n                setattr(entity, key, value)\n                flag_modified(entity, key)\n        entity.description_vec = get_entity_description_embedding(\n            entity.name, entity.description, self._embed_model\n        )\n        entity.meta_vec = get_entity_metadata_embedding(entity.meta, self._embed_model)\n        for relationship in session.exec(\n            select(self._relationship_db_model)\n            .options(\n                joinedload(self._relationship_db_model.source_entity),\n                joinedload(self._relationship_db_model.target_entity),\n            )\n            .where(\n                (self._relationship_db_model.source_entity_id == entity.id)\n                | (self._relationship_db_model.target_entity_id == entity.id)\n            )\n        ):\n            relationship.description_vec = get_relationship_description_embedding(\n                relationship.source_entity.name,\n                relationship.source_entity.description,\n                relationship.target_entity.name,\n                relationship.target_entity.description,\n                relationship.description,\n                self._embed_model,\n            )\n            session.add(relationship)\n        session.commit()\n        session.refresh(entity)\n        new_entity_dict = entity.screenshot()\n        create_staff_action_log(\n            session, \"update\", \"entity\", entity.id, old_entity_dict, new_entity_dict\n        )\n        return entity\n\n    def get_entity_subgraph(\n        self, session: Session, entity: SQLModel\n    ) -> Tuple[list, list]:\n        \"\"\"\n        Get the subgraph of an entity, including all related relationships and entities.\n        \"\"\"\n        relationships_queryset = session.exec(\n            select(self._relationship_db_model)\n            .options(\n                joinedload(self._relationship_db_model.source_entity),\n                
joinedload(self._relationship_db_model.target_entity),\n            )\n            .where(\n                (self._relationship_db_model.source_entity_id == entity.id)\n                | (self._relationship_db_model.target_entity_id == entity.id)\n            )\n        )\n        relationships = []\n        entities = []\n        entities_set = set()\n        for relationship in relationships_queryset:\n            entities_set.add(relationship.source_entity)\n            entities_set.add(relationship.target_entity)\n            relationships.append(relationship.screenshot())\n\n        for entity in entities_set:\n            entities.append(entity.screenshot())\n\n        return relationships, entities\n\n    def get_relationship(\n        self, session: Session, relationship_id: int\n    ) -> Optional[SQLModel]:\n        return session.get(self._relationship_db_model, relationship_id)\n\n    def update_relationship(\n        self, session: Session, relationship: SQLModel, new_relationship: dict\n    ) -> SQLModel:\n        old_relationship_dict = relationship.screenshot()\n        for key, value in new_relationship.items():\n            if value is not None:\n                setattr(relationship, key, value)\n                flag_modified(relationship, key)\n        relationship.description_vec = get_relationship_description_embedding(\n            relationship.source_entity.name,\n            relationship.source_entity.description,\n            relationship.target_entity.name,\n            relationship.target_entity.description,\n            relationship.description,\n            self._embed_model,\n        )\n        session.commit()\n        session.refresh(relationship)\n        new_relationship_dict = relationship.screenshot()\n        # FIXME: some error when create staff action log\n        create_staff_action_log(\n            session,\n            \"update\",\n            \"relationship\",\n            relationship.id,\n            
old_relationship_dict,\n            new_relationship_dict,\n        )\n        return relationship\n\n    def search_similar_entities(\n        self, session: Session, query: str, top_k: int = 10\n    ) -> list:\n        embedding = get_query_embedding(query, self._embed_model)\n        return session.exec(\n            select(self._entity_db_model)\n            .where(self._entity_db_model.entity_type == EntityType.original)\n            .order_by(self._entity_db_model.description_vec.cosine_distance(embedding))\n            .limit(top_k)\n        ).all()\n\n    def create_synopsis_entity(\n        self,\n        session: Session,\n        name: str,\n        description: str,\n        topic: str,\n        meta: dict,\n        related_entities_ids: List[int],\n    ) -> SQLModel:\n        # with session.begin():\n        synopsis_entity = self._entity_db_model(\n            name=name,\n            description=description,\n            description_vec=get_entity_description_embedding(\n                name, description, self._embed_model\n            ),\n            meta=meta,\n            meta_vec=get_entity_metadata_embedding(meta, self._embed_model),\n            entity_type=EntityType.synopsis,\n            synopsis_info={\n                \"entities\": related_entities_ids,\n                \"topic\": topic,\n            },\n        )\n        session.add(synopsis_entity)\n        graph_store = TiDBGraphStore(\n            knowledge_base=self.knowledge_base_id,\n            dspy_lm=None,\n            session=session,\n            embed_model=self._embed_model,\n            entity_db_model=self._entity_db_model,\n            relationship_db_model=self._relationship_db_model,\n        )\n        for related_entity in session.exec(\n            select(self._entity_db_model).where(\n                self._entity_db_model.id.in_(related_entities_ids)\n            )\n        ).all():\n            graph_store.create_relationship(\n                synopsis_entity,\n     
           related_entity,\n                RelationshipAIModel(\n                    source_entity=synopsis_entity.name,\n                    target_entity=related_entity.name,\n                    relationship_desc=f\"{related_entity.name} is a part of synopsis entity (name={synopsis_entity.name}, topic={topic})\",\n                ),\n                {\"relationship_type\": EntityType.synopsis.value},\n                commit=False,\n            )\n        session.commit()\n        create_staff_action_log(\n            session,\n            \"create_synopsis_entity\",\n            \"entity\",\n            synopsis_entity.id,\n            {},\n            synopsis_entity.screenshot(),\n            commit=False,\n        )\n        return synopsis_entity\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_store.py",
    "content": "import dspy\nimport logging\nimport numpy as np\nimport tidb_vector\nfrom deepdiff import DeepDiff\nfrom typing import List, Optional, Tuple, Dict, Set, Type, Any\nfrom collections import defaultdict\n\nfrom dspy import Predict\nfrom llama_index.core.embeddings.utils import EmbedType, resolve_embed_model\nfrom llama_index.embeddings.openai import OpenAIEmbedding, OpenAIEmbeddingModelType\nimport sqlalchemy\nfrom sqlmodel import Session, asc, func, select, text, SQLModel\nfrom sqlalchemy.orm import aliased, defer, joinedload, noload\nfrom tidb_vector.sqlalchemy import VectorAdaptor\nfrom sqlalchemy import or_, desc\n\nfrom app.core.db import engine\nfrom app.rag.indices.knowledge_graph.graph_store.helpers import (\n    get_entity_description_embedding,\n    get_relationship_description_embedding,\n    calculate_relationship_score,\n    get_entity_metadata_embedding,\n    get_query_embedding,\n    DEFAULT_RANGE_SEARCH_CONFIG,\n    DEFAULT_WEIGHT_COEFFICIENT_CONFIG,\n    DEFAULT_DEGREE_COEFFICIENT,\n)\nfrom app.rag.indices.knowledge_graph.graph_store.schema import KnowledgeGraphStore\nfrom app.rag.indices.knowledge_graph.schema import (\n    Entity,\n    Relationship,\n    SynopsisEntity,\n)\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    RetrievedEntity,\n    RetrievedRelationship,\n    RetrievedKnowledgeGraph,\n)\nfrom app.models import (\n    KnowledgeBase,\n    EntityType,\n    Document,\n)\n\nlogger = logging.getLogger(__name__)\n\n\ndef cosine_distance(v1, v2):\n    return 1 - np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))\n\n\nclass MergeEntities(dspy.Signature):\n    \"\"\"As a knowledge expert assistant specialized in database technologies, evaluate the two provided entities. 
These entities have been pre-analyzed and have same name but different descriptions and metadata.\n    Please carefully review the detailed descriptions and metadata for both entities to determine if they genuinely represent the same concept or object(entity).\n    If you conclude that the entities are identical, merge the descriptions and metadata fields of the two entities into a single consolidated entity.\n    If the entities are distinct despite their same name that may be due to different contexts or perspectives, do not merge the entities and return none as the merged entity.\n\n    Considerations: Ensure your decision is based on a comprehensive analysis of the content and context provided within the entity descriptions and metadata.\n    Please only response in JSON Format.\n    \"\"\"\n\n    entities: List[Entity] = dspy.InputField(\n        desc=\"List of entities identified from previous analysis.\"\n    )\n    merged_entity: Optional[Entity] = dspy.OutputField(\n        desc=\"Merged entity with consolidated descriptions and metadata.\"\n    )\n\n\nclass MergeEntitiesProgram(dspy.Module):\n    def __init__(self):\n        self.prog = Predict(MergeEntities)\n\n    def forward(self, entities: List[Entity]):\n        if len(entities) != 2:\n            raise ValueError(\"The input should contain exactly two entities\")\n\n        pred = self.prog(entities=entities)\n        return pred\n\n\nclass TiDBGraphStore(KnowledgeGraphStore):\n    def __init__(\n        self,\n        knowledge_base: KnowledgeBase,\n        dspy_lm: dspy.LM,\n        entity_db_model: Type[SQLModel],\n        relationship_db_model: Type[SQLModel],\n        chunk_db_model: Type[SQLModel],\n        session: Optional[Session] = None,\n        embed_model: Optional[EmbedType] = None,\n        description_similarity_threshold=0.9,\n    ):\n        self.knowledge_base = knowledge_base\n        self._session = session\n        self._owns_session = session is None\n        if self._session 
is None:\n            self._session = Session(engine)\n        self._dspy_lm = dspy_lm\n\n        if embed_model:\n            self._embed_model = resolve_embed_model(embed_model)\n        else:\n            self._embed_model = OpenAIEmbedding(\n                model=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL\n            )\n\n        self.merge_entities_prog = MergeEntitiesProgram()\n        self.description_cosine_distance_threshold = (\n            1 - description_similarity_threshold\n        )\n        self._entity_model = entity_db_model\n        self._relationship_model = relationship_db_model\n        self._chunk_model = chunk_db_model\n\n    def ensure_table_schema(self) -> None:\n        inspector = sqlalchemy.inspect(engine)\n        existed_table_names = inspector.get_table_names()\n        entities_table_name = self._entity_model.__tablename__\n        relationships_table_name = self._relationship_model.__tablename__\n\n        if entities_table_name not in existed_table_names:\n            self._entity_model.metadata.create_all(\n                engine, tables=[self._entity_model.__table__]\n            )\n\n            # Add HNSW index to accelerate ann queries.\n            VectorAdaptor(engine).create_vector_index(\n                self._entity_model.description_vec, tidb_vector.DistanceMetric.COSINE\n            )\n            VectorAdaptor(engine).create_vector_index(\n                self._entity_model.meta_vec, tidb_vector.DistanceMetric.COSINE\n            )\n\n            logger.info(\n                f\"Entities table <{entities_table_name}> has been created successfully.\"\n            )\n        else:\n            logger.info(\n                f\"Entities table <{entities_table_name}> is already exists, not action to do.\"\n            )\n\n        if relationships_table_name not in existed_table_names:\n            self._relationship_model.metadata.create_all(\n                engine, tables=[self._relationship_model.__table__]\n         
   )\n\n            # Add HNSW index to accelerate ann queries.\n            VectorAdaptor(engine).create_vector_index(\n                self._relationship_model.description_vec,\n                tidb_vector.DistanceMetric.COSINE,\n            )\n\n            logger.info(\n                f\"Relationships table <{relationships_table_name}> has been created successfully.\"\n            )\n        else:\n            logger.info(\n                f\"Relationships table <{relationships_table_name}> is already exists, not action to do.\"\n            )\n\n    def drop_table_schema(self) -> None:\n        inspector = sqlalchemy.inspect(engine)\n        existed_table_names = inspector.get_table_names()\n        relationships_table_name = self._relationship_model.__tablename__\n        entities_table_name = self._entity_model.__tablename__\n\n        if relationships_table_name in existed_table_names:\n            self._relationship_model.metadata.drop_all(\n                engine, tables=[self._relationship_model.__table__]\n            )\n            logger.info(\n                f\"Relationships table <{relationships_table_name}> has been dropped successfully.\"\n            )\n        else:\n            logger.info(\n                f\"Relationships table <{relationships_table_name}> is not existed, not action to do.\"\n            )\n\n        if entities_table_name in existed_table_names:\n            self._entity_model.metadata.drop_all(\n                engine, tables=[self._entity_model.__table__]\n            )\n            logger.info(\n                f\"Entities table <{entities_table_name}> has been dropped successfully.\"\n            )\n        else:\n            logger.info(\n                f\"Entities table <{entities_table_name}> is not existed, not action to do.\"\n            )\n\n    def close_session(self) -> None:\n        # Always call this method is necessary to make sure the session is closed\n        if self._owns_session:\n            
self._session.close()\n\n    def save(self, chunk_id, entities_df, relationships_df):\n        if entities_df.empty or relationships_df.empty:\n            logger.info(\n                \"Entities or relationships are empty, skip saving to the database\"\n            )\n            return\n\n        if (\n            self._session.exec(\n                select(self._relationship_model).where(\n                    self._relationship_model.meta[\"chunk_id\"] == chunk_id\n                )\n            ).first()\n            is not None\n        ):\n            logger.info(f\"{chunk_id} already exists in the relationship table, skip.\")\n            return\n\n        entities_name_map = defaultdict(list)\n        for _, row in entities_df.iterrows():\n            entities_name_map[row[\"name\"]].append(\n                self.get_or_create_entity(\n                    Entity(\n                        name=row[\"name\"],\n                        description=row[\"description\"],\n                        metadata=row[\"meta\"],\n                    ),\n                    commit=False,\n                )\n            )\n\n        def _find_or_create_entity_for_relation(\n            name: str, description: str\n        ) -> SQLModel:\n            _embedding = get_entity_description_embedding(\n                name, description, self._embed_model\n            )\n            # Check entities_name_map first, if not found, then check the database\n            for e in entities_name_map.get(name, []):\n                if (\n                    cosine_distance(e.description_vec, _embedding)\n                    < self.description_cosine_distance_threshold\n                ):\n                    return e\n            return self.get_or_create_entity(\n                Entity(\n                    name=name,\n                    description=description,\n                    metadata={\"status\": \"need-revised\"},\n                ),\n                commit=False,\n            
)\n\n        try:\n            for _, row in relationships_df.iterrows():\n                logger.info(\n                    \"save entities for relationship %s -> %s -> %s\",\n                    row[\"source_entity\"],\n                    row[\"relationship_desc\"],\n                    row[\"target_entity\"],\n                )\n                source_entity = _find_or_create_entity_for_relation(\n                    row[\"source_entity\"], row[\"source_entity_description\"]\n                )\n                target_entity = _find_or_create_entity_for_relation(\n                    row[\"target_entity\"], row[\"target_entity_description\"]\n                )\n\n                self.create_relationship(\n                    source_entity,\n                    target_entity,\n                    Relationship(\n                        source_entity=source_entity.name,\n                        target_entity=target_entity.name,\n                        relationship_desc=row[\"relationship_desc\"],\n                    ),\n                    relationship_metadata=row[\"meta\"],\n                    commit=False,\n                )\n\n            self._session.commit()\n        except Exception as e:\n            logger.error(e, exc_info=True)\n            self._session.rollback()\n            raise e\n\n    def create_relationship(\n        self,\n        source_entity: SQLModel,\n        target_entity: SQLModel,\n        relationship: Relationship,\n        relationship_metadata: dict = {},\n        commit=True,\n    ):\n        relationship_object = self._relationship_model(\n            source_entity=source_entity,\n            target_entity=target_entity,\n            description=relationship.relationship_desc,\n            description_vec=get_relationship_description_embedding(\n                source_entity.name,\n                source_entity.description,\n                target_entity.name,\n                target_entity.description,\n                
relationship.relationship_desc,\n                self._embed_model,\n            ),\n            meta=relationship_metadata,\n            document_id=relationship_metadata.get(\"document_id\"),\n            chunk_id=relationship_metadata.get(\"chunk_id\"),\n        )\n        self._session.add(relationship_object)\n        if commit:\n            self._session.commit()\n            self._session.refresh(relationship_object)\n        else:\n            self._session.flush()\n\n    def get_subgraph_by_relationship_ids(\n        self, ids: list[int], **kwargs\n    ) -> RetrievedKnowledgeGraph:\n        stmt = (\n            select(self._relationship_model)\n            .where(self._relationship_model.id.in_(ids))\n            .options(\n                joinedload(self._relationship_model.source_entity),\n                joinedload(self._relationship_model.target_entity),\n            )\n        )\n        relationships_set = self._session.exec(stmt)\n        entities_set = set()\n        relationships = []\n        entities = []\n\n        for rel in relationships_set:\n            entities_set.add(rel.source_entity)\n            entities_set.add(rel.target_entity)\n            relationships.append(\n                RetrievedRelationship(\n                    id=rel.id,\n                    knowledge_base_id=self.knowledge_base.id,\n                    source_entity_id=rel.source_entity_id,\n                    target_entity_id=rel.target_entity_id,\n                    description=rel.description,\n                    rag_description=f\"{rel.source_entity.name} -> {rel.description} -> {rel.target_entity.name}\",\n                    meta=rel.meta,\n                    weight=rel.weight,\n                    last_modified_at=rel.last_modified_at,\n                )\n            )\n\n        for entity in entities_set:\n            entities.append(\n                RetrievedEntity(\n                    id=entity.id,\n                    
knowledge_base_id=self.knowledge_base.id,\n                    name=entity.name,\n                    description=entity.description,\n                    meta=entity.meta,\n                    entity_type=entity.entity_type,\n                )\n            )\n\n        return RetrievedKnowledgeGraph(\n            knowledge_base=self.knowledge_base.to_descriptor(),\n            entities=entities,\n            relationships=relationships,\n            **kwargs,\n        )\n\n    def get_or_create_entity(self, entity: Entity, commit: bool = True) -> SQLModel:\n        # using the cosine distance between the description vectors to determine if the entity already exists\n        entity_type = (\n            EntityType.synopsis\n            if isinstance(entity, SynopsisEntity)\n            else EntityType.original\n        )\n        entity_description_vec = get_entity_description_embedding(\n            entity.name,\n            entity.description,\n            self._embed_model,\n        )\n        hint = text(\n            f\"/*+ read_from_storage(tikv[{self._entity_model.__tablename__}]) */\"\n        )\n        result = (\n            self._session.query(\n                self._entity_model,\n                self._entity_model.description_vec.cosine_distance(\n                    entity_description_vec\n                ).label(\"distance\"),\n            )\n            .filter(\n                # Pass both criteria as separate arguments (implicit SQL AND).\n                # Python's `and` on SQLAlchemy expressions silently drops the\n                # second condition, because bool(col == val) does not build SQL.\n                self._entity_model.name == entity.name,\n                self._entity_model.entity_type == entity_type,\n            )\n            .prefix_with(hint)\n            .order_by(asc(\"distance\"))\n            .first()\n        )\n        if (\n            result is not None\n            and result[1] < self.description_cosine_distance_threshold\n        ):\n            db_obj = result[0]\n            ob_obj_metadata = db_obj.meta\n            if (\n                db_obj.description == entity.description\n                and db_obj.name == entity.name\n                and 
len(DeepDiff(ob_obj_metadata, entity.metadata)) == 0\n            ):\n                return db_obj\n            elif entity_type == EntityType.original:\n                # TODO: move to TiDBKnowledgeGraphIndex\n                # use LLM to merge the most similar entities\n                merged_entity = self._try_merge_entities(\n                    [\n                        Entity(\n                            name=db_obj.name,\n                            description=db_obj.description,\n                            metadata=ob_obj_metadata,\n                        ),\n                        Entity(\n                            name=entity.name,\n                            description=entity.description,\n                            metadata=entity.metadata,\n                        ),\n                    ]\n                )\n                if merged_entity is not None:\n                    db_obj.description = merged_entity.description\n                    db_obj.meta = merged_entity.metadata\n                    db_obj.description_vec = get_entity_description_embedding(\n                        db_obj.name, db_obj.description, self._embed_model\n                    )\n                    db_obj.meta_vec = get_entity_metadata_embedding(\n                        db_obj.meta, self._embed_model\n                    )\n\n                    self._session.add(db_obj)\n                    if commit:\n                        self._session.commit()\n                        self._session.refresh(db_obj)\n                    else:\n                        self._session.flush()\n                    return db_obj\n\n        synopsis_info_str = (\n            entity.group_info.model_dump()\n            if entity_type == EntityType.synopsis\n            else None\n        )\n\n        db_obj = self._entity_model(\n            name=entity.name,\n            description=entity.description,\n            description_vec=entity_description_vec,\n            
meta=entity.metadata,\n            meta_vec=get_entity_metadata_embedding(entity.metadata, self._embed_model),\n            synopsis_info=synopsis_info_str,\n            entity_type=entity_type,\n        )\n        self._session.add(db_obj)\n        if commit:\n            self._session.commit()\n            self._session.refresh(db_obj)\n        else:\n            self._session.flush()\n\n        return db_obj\n\n    def _try_merge_entities(self, entities: List[Entity]) -> Entity:\n        logger.info(f\"Trying to merge entities: {entities[0].name}\")\n        try:\n            with dspy.settings.context(lm=self._dspy_lm):\n                pred = self.merge_entities_prog(entities=entities)\n                return pred.merged_entity\n        except Exception as e:\n            logger.error(f\"Failed to merge entities: {e}\", exc_info=True)\n            return None\n\n    def retrieve_with_weight(\n        self,\n        query: str,\n        embedding: list,\n        depth: int = 2,\n        include_meta: bool = False,\n        with_degree: bool = False,\n        # experimental feature to filter relationships based on meta, can be removed in the future\n        relationship_meta_filters: dict = {},\n        session: Optional[Session] = None,\n    ) -> Tuple[List[RetrievedEntity], List[RetrievedRelationship]]:\n        if not embedding:\n            assert query, \"Either query or embedding must be provided\"\n            embedding = get_query_embedding(query, self._embed_model)\n\n        relationships, entities = self.search_relationships_weight(\n            embedding,\n            [],\n            [],\n            with_degree=with_degree,\n            relationship_meta_filters=relationship_meta_filters,\n            session=session,\n        )\n\n        all_relationships = set(relationships)\n        all_entities = set(entities)\n        visited_entities = set(e.id for e in entities)\n        visited_relationships = set(r.id for r in relationships)\n\n        
for _ in range(depth - 1):\n            actual_number = 0\n            progress = 0\n            search_number_each_depth = 10\n            for search_config in DEFAULT_RANGE_SEARCH_CONFIG:\n                search_ratio = search_config[1]\n                search_distance_range = search_config[0]\n                remaining_number = search_number_each_depth - actual_number\n                # calculate the expected number based search progress\n                # It's a accumulative search, so the expected number should be the difference between the expected number and the actual number\n                expected_number = (\n                    int(\n                        (search_ratio + progress) * search_number_each_depth\n                        - actual_number\n                    )\n                    if progress * search_number_each_depth > actual_number\n                    else int(search_ratio * search_number_each_depth)\n                )\n                if expected_number > remaining_number:\n                    expected_number = remaining_number\n                if remaining_number <= 0:\n                    break\n\n                new_relationships, new_entities = self.search_relationships_weight(\n                    embedding,\n                    visited_relationships,\n                    visited_entities,\n                    search_distance_range,\n                    rank_n=expected_number,\n                    with_degree=with_degree,\n                    relationship_meta_filters=relationship_meta_filters,\n                    session=session,\n                )\n\n                all_relationships.update(new_relationships)\n                all_entities.update(new_entities)\n\n                visited_entities.update(e.id for e in new_entities)\n                visited_relationships.update(r.id for r in new_relationships)\n                actual_number += len(new_relationships)\n                # search_ratio == 1 won't count the progress\n     
           if search_ratio != 1:\n                    progress += search_ratio\n\n        synopsis_entities = self.fetch_similar_entities(\n            embedding, top_k=2, entity_type=EntityType.synopsis, session=session\n        )\n        all_entities.update(synopsis_entities)\n\n        related_doc_ids = set()\n        for r in all_relationships:\n            if \"doc_id\" not in r.meta:\n                continue\n            related_doc_ids.add(r.meta[\"doc_id\"])\n\n        entities = [\n            RetrievedEntity(\n                id=e.id,\n                knowledge_base_id=self.knowledge_base.id,\n                name=e.name,\n                description=e.description,\n                meta=e.meta if include_meta else None,\n                entity_type=e.entity_type,\n            )\n            for e in all_entities\n        ]\n        relationships = [\n            RetrievedRelationship(\n                id=r.id,\n                knowledge_base_id=self.knowledge_base.id,\n                source_entity_id=r.source_entity_id,\n                target_entity_id=r.target_entity_id,\n                rag_description=f\"{r.source_entity.name} -> {r.description} -> {r.target_entity.name}\",\n                description=r.description,\n                meta=r.meta,\n                weight=r.weight,\n                last_modified_at=r.last_modified_at,\n            )\n            for r in all_relationships\n        ]\n\n        return entities, relationships\n\n    # Function to fetch degrees for entities\n    def fetch_entity_degrees(\n        self,\n        entity_ids: List[int],\n        session: Optional[Session] = None,\n    ) -> Dict[int, Dict[str, int]]:\n        degrees = {\n            entity_id: {\"in_degree\": 0, \"out_degree\": 0} for entity_id in entity_ids\n        }\n        session = session or self._session\n\n        try:\n            # Fetch out-degrees\n            out_degree_query = (\n                session.query(\n                    
self._relationship_model.source_entity_id,\n                    func.count(self._relationship_model.id).label(\"out_degree\"),\n                )\n                .filter(self._relationship_model.source_entity_id.in_(entity_ids))\n                .group_by(self._relationship_model.source_entity_id)\n            ).all()\n\n            for row in out_degree_query:\n                degrees[row.source_entity_id][\"out_degree\"] = row.out_degree\n\n            # Fetch in-degrees\n            in_degree_query = (\n                session.query(\n                    self._relationship_model.target_entity_id,\n                    func.count(self._relationship_model.id).label(\"in_degree\"),\n                )\n                .filter(self._relationship_model.target_entity_id.in_(entity_ids))\n                .group_by(self._relationship_model.target_entity_id)\n            ).all()\n\n            for row in in_degree_query:\n                degrees[row.target_entity_id][\"in_degree\"] = row.in_degree\n        except Exception as e:\n            logger.error(e)\n\n        return degrees\n\n    def search_relationships_weight(\n        self,\n        embedding: List[float],\n        visited_relationships: Set[int],\n        visited_entities: Set[int],\n        distance_range: Tuple[float, float] = (0.0, 1.0),\n        limit: int = 100,\n        weight_coefficient_config: List[\n            Tuple[Tuple[int, int], float]\n        ] = DEFAULT_WEIGHT_COEFFICIENT_CONFIG,\n        alpha: float = 1,\n        rank_n: int = 10,\n        degree_coefficient: float = DEFAULT_DEGREE_COEFFICIENT,\n        with_degree: bool = False,\n        relationship_meta_filters: Dict = {},\n        session: Optional[Session] = None,\n    ) -> Tuple[List[SQLModel], List[SQLModel]]:\n        # select the relationships to rank\n        subquery = (\n            select(\n                self._relationship_model,\n                self._relationship_model.description_vec.cosine_distance(\n                    
embedding\n                ).label(\"embedding_distance\"),\n            )\n            .options(defer(self._relationship_model.description_vec))\n            .order_by(asc(\"embedding_distance\"))\n            .limit(limit * 10)\n        ).subquery()\n\n        relationships_alias = aliased(self._relationship_model, subquery)\n\n        query = (\n            select(relationships_alias, text(\"embedding_distance\"))\n            .options(\n                defer(relationships_alias.description_vec),\n                joinedload(relationships_alias.source_entity)\n                .defer(self._entity_model.meta_vec)\n                .defer(self._entity_model.description_vec),\n                joinedload(relationships_alias.target_entity)\n                .defer(self._entity_model.meta_vec)\n                .defer(self._entity_model.description_vec),\n            )\n            .where(relationships_alias.weight >= 0)\n        )\n\n        if relationship_meta_filters:\n            for k, v in relationship_meta_filters.items():\n                query = query.where(relationships_alias.meta[k] == v)\n\n        if visited_relationships:\n            query = query.where(subquery.c.id.notin_(visited_relationships))\n\n        if distance_range != (0.0, 1.0):\n            # embedding_distance between the range\n            query = query.where(\n                text(\n                    \"embedding_distance >= :min_distance AND embedding_distance <= :max_distance\"\n                )\n            ).params(min_distance=distance_range[0], max_distance=distance_range[1])\n\n        if visited_entities:\n            query = query.where(subquery.c.source_entity_id.in_(visited_entities))\n\n        query = query.order_by(asc(\"embedding_distance\")).limit(limit)\n\n        # Order by embedding distance and apply limit\n        session = session or self._session\n        relationships = session.exec(query).all()\n\n        if len(relationships) <= rank_n:\n            
relationship_set = set([rel for rel, _ in relationships])\n            entity_set = set()\n            for r in relationship_set:\n                entity_set.add(r.source_entity)\n                entity_set.add(r.target_entity)\n            return relationship_set, entity_set\n\n        # Fetch degrees if with_degree is True\n        if with_degree:\n            entity_ids = set()\n            for rel, _ in relationships:\n                entity_ids.add(rel.source_entity_id)\n                entity_ids.add(rel.target_entity_id)\n            degrees = self.fetch_entity_degrees(list(entity_ids), session=session)\n        else:\n            degrees = {}\n\n        # calculate the relationship score based on distance and weight\n        ranked_relationships = []\n        for relationship, embedding_distance in relationships:\n            source_in_degree = (\n                degrees[relationship.source_entity_id][\"in_degree\"]\n                if with_degree\n                else 0\n            )\n            target_out_degree = (\n                degrees[relationship.target_entity_id][\"out_degree\"]\n                if with_degree\n                else 0\n            )\n            final_score = calculate_relationship_score(\n                embedding_distance,\n                relationship.weight,\n                source_in_degree,\n                target_out_degree,\n                alpha,\n                weight_coefficient_config,\n                degree_coefficient,\n                with_degree,\n            )\n            ranked_relationships.append((relationship, final_score))\n\n        # rank relationships based on the calculated score\n        ranked_relationships.sort(key=lambda x: x[1], reverse=True)\n        relationship_set = set([rel for rel, score in ranked_relationships[:rank_n]])\n        entity_set = set()\n        for r in relationship_set:\n            entity_set.add(r.source_entity)\n            entity_set.add(r.target_entity)\n\n        return 
list(relationship_set), list(entity_set)\n\n    def fetch_similar_entities_by_post_filter(\n        self,\n        embedding: list,\n        top_k: int = 5,\n        entity_type: EntityType = EntityType.original,\n        session: Optional[Session] = None,\n        post_filter_multiplier: int = 10,\n    ):\n        new_entity_set = set()\n        session = session or self._session\n\n        # Create a subquery with a larger limit and include the distance\n        subquery = (\n            select(\n                self._entity_model,\n                self._entity_model.description_vec.cosine_distance(embedding).label(\n                    \"distance\"\n                ),\n            )\n            .order_by(asc(\"distance\"))\n            .limit(\n                post_filter_multiplier * top_k\n                if entity_type != EntityType.original\n                else top_k\n            )\n            .subquery()\n        )\n\n        # Select from the pre-ranked subquery itself; selecting the base table\n        # while filtering on subquery columns produced an implicit cartesian\n        # join and ignored the ANN pre-ranking.\n        entity_alias = aliased(self._entity_model, subquery)\n        query = (\n            select(entity_alias)\n            .where(subquery.c.entity_type == entity_type)\n            .order_by(asc(subquery.c.distance))\n            .limit(top_k)\n        )\n\n        for row in session.exec(query).all():\n            new_entity_set.add(row)\n\n        return new_entity_set\n\n    def fetch_similar_entities(\n        self,\n        embedding: list,\n        top_k: int = 10,\n        entity_type: EntityType = EntityType.original,\n        session: Optional[Session] = None,\n    ):\n        new_entity_set = set()\n\n        # Retrieve entities based on their ID and similarity to the embedding\n        session = session or self._session\n\n        query = select(self._entity_model)\n\n        if entity_type == EntityType.synopsis:\n            query = query.where(self._entity_model.entity_type == entity_type)\n            hint = text(\"/*+ read_from_storage(tikv[entities]) */\")\n            query = query.prefix_with(hint)\n\n        
query = query.order_by(\n            self._entity_model.description_vec.cosine_distance(embedding)\n        ).limit(top_k)\n\n        # Debug: Print the SQL query\n        \"\"\"\n        from sqlalchemy.dialects import mysql\n        compiled_query = query.compile(\n            dialect=mysql.dialect(), compile_kwargs={\"literal_binds\": True}\n        )\n        print(f\"Debug - SQL Query: {compiled_query}\")\n        \"\"\"\n\n        for entity in session.exec(query).all():\n            new_entity_set.add(entity)\n\n        return new_entity_set\n\n    def retrieve_graph_data(\n        self,\n        query_text: str,\n        top_k: int = 5,\n        similarity_threshold: float = 0.7,\n    ) -> Dict[str, List[Dict[str, Any]]]:\n        \"\"\"Retrieve related entities and relationships using semantic search.\n\n        Args:\n            query_text: The search query text\n            top_k: Maximum number of results to return for each type\n            similarity_threshold: Minimum similarity score threshold\n\n        Returns:\n            Dictionary containing:\n            - entities: List of similar entities with similarity scores\n            - relationships: List of similar relationships with similarity scores\n        \"\"\"\n        query_embedding = get_query_embedding(query_text, self._embed_model)\n\n        # Query similar entities\n        entity_query = (\n            select(\n                self._entity_model,\n                (\n                    1\n                    - self._entity_model.description_vec.cosine_distance(\n                        query_embedding\n                    )\n                ).label(\"similarity\"),\n            )\n            .options(\n                defer(self._entity_model.description_vec),\n                defer(self._entity_model.meta_vec),\n            )\n            .order_by(desc(\"similarity\"))\n            .limit(top_k * 2)  # Fetch more results to account for filtering\n        )\n\n        # Query 
similar relationships\n        relationship_query = (\n            select(\n                self._relationship_model,\n                (\n                    1\n                    - self._relationship_model.description_vec.cosine_distance(\n                        query_embedding\n                    )\n                ).label(\"similarity\"),\n            )\n            .options(\n                defer(self._relationship_model.description_vec),\n                joinedload(self._relationship_model.source_entity)\n                .defer(self._entity_model.meta_vec)\n                .defer(self._entity_model.description_vec),\n                joinedload(self._relationship_model.target_entity)\n                .defer(self._entity_model.meta_vec)\n                .defer(self._entity_model.description_vec),\n            )\n            .order_by(desc(\"similarity\"))\n            .limit(top_k * 2)  # Fetch more results to account for filtering\n        )\n\n        # Execute both queries\n        entities = []\n        relationships = []\n\n        for entity, similarity in self._session.exec(entity_query).all():\n            if similarity >= similarity_threshold and len(entities) < top_k:\n                entities.append(\n                    {\n                        \"id\": entity.id,\n                        \"name\": entity.name,\n                        \"description\": entity.description,\n                        \"metadata\": entity.meta,\n                        \"similarity_score\": similarity,\n                    }\n                )\n\n        for relationship, similarity in self._session.exec(relationship_query).all():\n            if similarity >= similarity_threshold and len(relationships) < top_k:\n                relationships.append(\n                    {\n                        \"id\": relationship.id,\n                        \"relationship\": relationship.description,\n                        \"source_entity\": {\n                            
\"id\": relationship.source_entity.id,\n                            \"name\": relationship.source_entity.name,\n                            \"description\": relationship.source_entity.description,\n                        },\n                        \"target_entity\": {\n                            \"id\": relationship.target_entity.id,\n                            \"name\": relationship.target_entity.name,\n                            \"description\": relationship.target_entity.description,\n                        },\n                        \"similarity_score\": similarity,\n                    }\n                )\n\n        return {\"entities\": entities, \"relationships\": relationships}\n\n    def retrieve_neighbors(\n        self,\n        entities_ids: List[int],\n        query: str,\n        max_depth: int = 1,\n        max_neighbors: int = 20,\n        similarity_threshold: float = 0.7,\n    ) -> Dict[str, List[Dict]]:\n        \"\"\"Retrieve most relevant neighbor paths for a group of similar nodes.\n\n        Args:\n            node_ids: List of source node IDs (representing similar entities)\n            query: Search query for relevant relationships\n            max_depth: Maximum depth for relationship traversal\n            max_neighbors: Maximum number of total neighbor paths to return\n            similarity_threshold: Minimum similarity score threshold\n\n        Returns:\n            Dictionary containing most relevant paths from source nodes to neighbors\n        \"\"\"\n        query_embedding = get_query_embedding(query, self._embed_model)\n        # Get all source entities\n        source_entities = self._session.exec(\n            select(self._entity_model)\n            .options(\n                defer(self._entity_model.description_vec),\n                defer(self._entity_model.meta_vec),\n            )\n            .where(self._entity_model.id.in_(entities_ids))\n        ).all()\n\n        # Track visited nodes and discovered paths\n    
    all_visited = set(entities_ids)\n        current_level_nodes = set(entities_ids)\n        neighbors = []  # Store all discovered paths with their relevance scores\n\n        for depth in range(max_depth):\n            if not current_level_nodes:\n                break\n\n            # Query relationships for current level\n            relationships = self._session.exec(\n                select(\n                    self._relationship_model,\n                    (\n                        1\n                        - self._relationship_model.description_vec.cosine_distance(\n                            query_embedding\n                        )\n                    ).label(\"similarity\"),\n                )\n                .options(\n                    defer(self._relationship_model.description_vec),\n                    joinedload(self._relationship_model.source_entity)\n                    .defer(self._entity_model.meta_vec)\n                    .defer(self._entity_model.description_vec),\n                    joinedload(self._relationship_model.target_entity)\n                    .defer(self._entity_model.meta_vec)\n                    .defer(self._entity_model.description_vec),\n                )\n                .where(\n                    or_(\n                        self._relationship_model.source_entity_id.in_(\n                            current_level_nodes\n                        ),\n                        self._relationship_model.target_entity_id.in_(\n                            current_level_nodes\n                        ),\n                    )\n                )\n                .order_by(desc(\"similarity\"))\n                .limit(max_neighbors * 2)  # Fetch more results to account for filtering\n            ).all()\n\n            next_level_nodes = set()\n\n            for rel, similarity in relationships:\n                # Skip if similarity is below threshold\n                if similarity < similarity_threshold:\n                  
  continue\n\n                # Determine direction and connected entity\n                if rel.source_entity_id in current_level_nodes:\n                    connected_id = rel.target_entity_id\n                else:\n                    connected_id = rel.source_entity_id\n\n                # Skip if already visited\n                if connected_id in all_visited:\n                    continue\n\n                neighbors.append(\n                    {\n                        \"id\": rel.id,\n                        \"relationship\": rel.description,\n                        \"source_entity\": {\n                            \"id\": rel.source_entity.id,\n                            \"name\": rel.source_entity.name,\n                            \"description\": rel.source_entity.description,\n                        },\n                        \"target_entity\": {\n                            \"id\": rel.target_entity.id,\n                            \"name\": rel.target_entity.name,\n                            \"description\": rel.target_entity.description,\n                        },\n                        \"similarity_score\": similarity,\n                    }\n                )\n                next_level_nodes.add(connected_id)\n                all_visited.add(connected_id)\n\n            current_level_nodes = next_level_nodes\n\n        # Sort all paths by similarity score and return top max_neighbors\n        neighbors.sort(key=lambda x: x[\"similarity_score\"], reverse=True)\n\n        return {\"relationships\": neighbors[:max_neighbors]}\n\n    def get_chunks_by_relationships(\n        self,\n        relationships_ids: List[int],\n        session: Optional[Session] = None,\n    ) -> List[Dict[str, Any]]:\n        \"\"\"Get chunks for a list of relationships.\n\n        Args:\n            relationships: List of relationship objects\n            session: Optional database session\n\n        Returns:\n            List of dictionaries containing chunk 
information:\n            - text: chunk text content\n            - document_id: associated document id\n            - meta: chunk metadata\n        \"\"\"\n        session = session or self._session\n\n        relationships = session.exec(\n            select(self._relationship_model).where(\n                self._relationship_model.id.in_(relationships_ids)\n            )\n        ).all()\n\n        # Extract chunk IDs from relationships\n        chunk_ids = {\n            rel.meta.get(\"chunk_id\")\n            for rel in relationships\n            if rel.meta.get(\"chunk_id\") is not None\n        }\n\n        if chunk_ids:\n            logger.info(f\"Getting chunks for relationships: {chunk_ids}\")\n            # Query chunks\n            chunks = session.exec(\n                select(self._chunk_model).where(self._chunk_model.id.in_(chunk_ids))\n            ).all()\n\n            return [\n                {\n                    \"id\": chunk.id,\n                    \"text\": chunk.text,\n                    \"document_id\": chunk.document_id,\n                    \"meta\": {\n                        \"language\": chunk.meta.get(\"language\"),\n                        \"product\": chunk.meta.get(\"product\"),\n                        \"resource\": chunk.meta.get(\"resource\"),\n                        \"source_uri\": chunk.meta.get(\"source_uri\"),\n                        \"tidb_version\": chunk.meta.get(\"tidb_version\"),\n                    },\n                }\n                for chunk in chunks\n            ]\n\n        document_ids = {\n            rel.document_id\n            for rel in relationships\n            if rel.document_id is not None\n        }\n\n        logger.info(f\"Getting documents for relationships: {document_ids}\")\n\n        documents = session.exec(\n            select(Document).where(Document.id.in_(document_ids))\n        ).all()\n\n        return [\n            {\n                \"id\": doc.id,\n                \"text\": 
doc.content,\n                \"document_id\": doc.id,\n                \"meta\": doc.meta,\n            }\n            for doc in documents\n        ]\n\n    def get_entire_knowledge_graph(self) -> RetrievedKnowledgeGraph:\n        \"\"\"Retrieve all entities and relationships from the knowledge graph store.\n\n        Returns:\n            RetrievedKnowledgeGraph containing all entities and relationships\n        \"\"\"\n        # Query all entities\n        entity_query = select(self._entity_model).order_by(self._entity_model.id)\n        db_entities = self._session.exec(entity_query).all()\n\n        # Query all relationships with their related entities\n        relationship_query = (\n            select(self._relationship_model)\n            .options(\n                joinedload(self._relationship_model.source_entity),\n                joinedload(self._relationship_model.target_entity),\n            )\n            .order_by(self._relationship_model.id)\n        )\n        db_relationships = self._session.exec(relationship_query).all()\n\n        # Convert entities to RetrievedEntity objects\n        entities = []\n        for entity in db_entities:\n            entities.append(\n                RetrievedEntity(\n                    id=entity.id,\n                    knowledge_base_id=self.knowledge_base.id,\n                    name=entity.name,\n                    description=entity.description,\n                    meta=entity.meta,\n                    entity_type=entity.entity_type,\n                )\n            )\n\n        # Convert relationships to RetrievedRelationship objects\n        relationships = []\n        for rel in db_relationships:\n            relationships.append(\n                RetrievedRelationship(\n                    id=rel.id,\n                    knowledge_base_id=self.knowledge_base.id,\n                    source_entity_id=rel.source_entity_id,\n                    target_entity_id=rel.target_entity_id,\n                    
description=rel.description,\n                    rag_description=f\"{rel.source_entity.name} -> {rel.description} -> {rel.target_entity.name}\",\n                    meta=rel.meta,\n                    weight=rel.weight,\n                    last_modified_at=rel.last_modified_at,\n                )\n            )\n        return RetrievedKnowledgeGraph(\n            knowledge_base=self.knowledge_base.to_descriptor(),\n            entities=entities,\n            relationships=relationships,\n        )\n\n    def stream_entire_knowledge_graph(self, chunk_size: int = 5000):\n        \"\"\"Stream entire knowledge graph in chunks\n        \n        Args:\n            chunk_size: Number of entities/relationships per chunk\n            \n        Yields:\n            Dict containing chunk type and data\n        \"\"\"\n        # Stream entities\n        entity_query = (\n            select(self._entity_model)\n            .options(\n                defer(self._entity_model.description_vec),\n                defer(self._entity_model.meta_vec),\n            )\n            .order_by(self._entity_model.id)\n        )\n        last_entity_id = 0\n        \n        while True:\n            chunk_query = entity_query.where(\n                self._entity_model.id > last_entity_id\n            ).limit(chunk_size)\n            db_entities = self._session.exec(chunk_query).all()\n            \n            if not db_entities:\n                break\n                \n            entities = []\n            for entity in db_entities:\n                entities.append(\n                    RetrievedEntity(\n                        id=entity.id,\n                        knowledge_base_id=self.knowledge_base.id,\n                        name=entity.name,\n                        description=entity.description,\n                        meta=entity.meta,\n                        entity_type=entity.entity_type,\n                    )\n                )\n            \n            
last_entity_id = db_entities[-1].id\n            yield {\"type\": \"entities\", \"data\": entities}\n        \n        # Stream relationships\n        relationship_query = (\n            select(self._relationship_model)\n            .options(\n                defer(self._relationship_model.description_vec),\n                defer(self._relationship_model.chunk_id),\n                noload(self._relationship_model.source_entity),\n                noload(self._relationship_model.target_entity),\n            )\n            .order_by(self._relationship_model.id)\n        )\n        logger.info(f\"Relationship query: {relationship_query}\")\n        last_relationship_id = 0\n        \n        while True:\n            chunk_query = relationship_query.where(\n                self._relationship_model.id > last_relationship_id\n            ).limit(chunk_size)\n            logger.info(f\"Executing relationship chunk query: {chunk_query}\")\n            db_relationships = self._session.exec(chunk_query).all()\n            \n            if not db_relationships:\n                break\n                \n            relationships = []\n            for rel in db_relationships:\n                relationships.append(\n                    RetrievedRelationship(\n                        id=rel.id,\n                        knowledge_base_id=self.knowledge_base.id,\n                        source_entity_id=rel.source_entity_id,\n                        target_entity_id=rel.target_entity_id,\n                        description=rel.description,\n                        rag_description=None,  # Skip rag_description for streaming performance\n                        meta=rel.meta,\n                        weight=rel.weight,\n                        last_modified_at=rel.last_modified_at,\n                    )\n                )\n            \n            last_relationship_id = db_relationships[-1].id\n            yield {\"type\": \"relationships\", \"data\": relationships}\n"
  },
  {
    "path": "backend/app/rag/indices/knowledge_graph/schema.py",
    "content": "from pydantic import BaseModel, Field\nfrom typing import Mapping, Any, List\n\n\nclass Entity(BaseModel):\n    \"\"\"List of entities extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = Field(\n        description=\"Name of the entity, it should be a clear and concise term\"\n    )\n    description: str = Field(\n        description=(\n            \"Description of the entity, it should be a complete and comprehensive sentence, not few words. \"\n            \"Sample description of entity 'TiDB in-place upgrade': \"\n            \"'Upgrade TiDB component binary files to achieve upgrade, generally use rolling upgrade method'\"\n        )\n    )\n    metadata: Mapping[str, Any] = Field(\n        description=(\n            \"The covariates (which is a comprehensive json TREE, the first field is always: 'topic') to claim the entity. \"\n        )\n    )\n\n\nclass EntityWithID(Entity):\n    \"\"\"Entity extracted from the text to form the knowledge graph with an ID.\"\"\"\n\n    id: int = Field(description=\"Unique identifier for the entity.\")\n\n\nclass SynopsisInfo(BaseModel):\n    \"\"\"A synopsis corresponds to a group of entities that share the same topic and can contribute to synopsis topic.\"\"\"\n\n    topic: str = Field(\n        description=\"The shared topic of the synopsis, and each entity in the group can contribute factual data from its own perspective.\"\n    )\n    entities: List[int] = Field(\n        description=\"A group of entity(only IDs) that can contribute to the synopsis base on the analysis of entity descriptions and metadata.\"\n    )\n\n\nclass SynopsisEntity(Entity):\n    \"\"\"Unified synopsis entity with comprehensive description and metadata based on the entities group.\"\"\"\n\n    group_info: SynopsisInfo = Field(\n        description=\"Group of entities to be unified into a single synopsis entity.\"\n    )\n\n\nclass ExistingSynopsisEntity(SynopsisEntity):\n    \"\"\"Unified synopsis entity 
with comprehensive description and metadata based on the entities group.\"\"\"\n\n    id: int = Field(description=\"Unique identifier for the entity.\")\n\n\nclass Relationship(BaseModel):\n    \"\"\"List of relationships extracted from the text to form the knowledge graph\"\"\"\n\n    source_entity: str = Field(\n        description=\"Source entity name of the relationship, it should be an existing entity in the Entity list\"\n    )\n    target_entity: str = Field(\n        description=\"Target entity name of the relationship, it should be an existing entity in the Entity list\"\n    )\n    relationship_desc: str = Field(\n        description=(\n            \"Description of the relationship, it should be a complete and comprehensive sentence, not few words. \"\n            \"Sample relationship description: 'TiDB will release a new LTS version every 6 months.'\"\n        )\n    )\n\n\nclass RelationshipReasoning(Relationship):\n    \"\"\"Relationship between two entities extracted from the query\"\"\"\n\n    reasoning: str = Field(\n        description=(\n            \"Category reasoning for the relationship, e.g., 'the main concerns of the user', 'the problem the user is facing', 'the user case scenario', etc.\"\n        )\n    )\n\n\nclass KnowledgeGraph(BaseModel):\n    \"\"\"Graph representation of the knowledge for text.\"\"\"\n\n    relationships: List[Relationship] = Field(\n        description=\"List of relationships in the knowledge graph\"\n    )\n    entities: List[Entity] = Field(\n        description=\"List of entities in the knowledge graph\"\n    )\n\n\nclass EntityCovariateInput(BaseModel):\n    \"\"\"List of entities extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = Field(description=\"Name of the entity\")\n    description: str = Field(description=(\"Description of the entity\"))\n\n\nclass EntityCovariateOutput(BaseModel):\n    \"\"\"List of entities extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = 
Field(description=\"Name of the entity\")\n    description: str = Field(description=(\"Description of the entity\"))\n    covariates: Mapping[str, Any] = Field(\n        description=(\n            \"The attributes (which is a comprehensive json TREE, the first field is always: 'topic') to claim the entity. \"\n        )\n    )\n\n\nclass DecomposedFactors(BaseModel):\n    \"\"\"Decomposed factors extracted from the query to form the knowledge graph\"\"\"\n\n    relationships: List[RelationshipReasoning] = Field(\n        description=\"List of relationships to represent critical concepts and their relationships extracted from the query.\"\n    )\n"
  },
  {
    "path": "backend/app/rag/indices/vector_search/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/indices/vector_search/vector_store/__init__.py",
    "content": "__all__ = [\"TiDBVectorStore\"]\n"
  },
  {
    "path": "backend/app/rag/indices/vector_search/vector_store/tidb_vector_store.py",
    "content": "import logging\nimport tidb_vector\nimport sqlalchemy\n\nfrom typing import Any, List, Optional, Type\nfrom llama_index.core.schema import BaseNode, MetadataMode, TextNode\nfrom llama_index.core.bridge.pydantic import PrivateAttr\nfrom llama_index.core.vector_stores.types import (\n    BasePydanticVectorStore,\n    VectorStoreQuery,\n    VectorStoreQueryResult,\n)\nfrom llama_index.core.vector_stores.utils import (\n    metadata_dict_to_node,\n    node_to_metadata_dict,\n)\nfrom sqlmodel import (\n    SQLModel,\n    Session,\n    delete,\n    select,\n    asc,\n    alias,\n)\nfrom tidb_vector.sqlalchemy import VectorAdaptor\nfrom app.core.db import engine\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef node_to_relation_dict(node: BaseNode) -> dict:\n    relationships = {}\n    for r_type, node_info in node.relationships.items():\n        relationships[r_type.name] = {\n            \"node_id\": node_info.node_id,\n            \"node_type\": node_info.node_type.name,\n            \"meta\": node_info.metadata,\n            \"hash\": node_info.hash,\n        }\n    return relationships\n\n\nclass TiDBVectorStore(BasePydanticVectorStore):\n    _session: Session = PrivateAttr()\n    _owns_session: bool = PrivateAttr()\n    _table_name: str = PrivateAttr()\n    _vector_dimension: int = PrivateAttr()\n\n    stores_text: bool = True\n    flat_metadata: bool = False\n\n    def __init__(\n        self,\n        chunk_db_model: Type[SQLModel],\n        session: Optional[Session] = None,\n        oversampling_factor: int = 1,\n        **kwargs: Any,\n    ) -> None:\n        \"\"\"\n        Args:\n            oversampling_factor (int): The oversampling factor for the similarity search. 
The higher the factor, the higher recall rate.\n        \"\"\"\n        super().__init__(**kwargs)\n        self._session = session\n        self._owns_session = session is None\n        if self._session is None:\n            self._session = Session(engine)\n\n        self._chunk_db_model = chunk_db_model\n        self._oversampling_factor = oversampling_factor\n\n    def ensure_table_schema(self) -> None:\n        inspector = sqlalchemy.inspect(engine)\n        table_name = self._chunk_db_model.__tablename__\n\n        if table_name not in inspector.get_table_names():\n            self._chunk_db_model.metadata.create_all(\n                engine, tables=[self._chunk_db_model.__table__]\n            )\n\n            # Add HNSW index to accelerate ann queries.\n            VectorAdaptor(engine).create_vector_index(\n                self._chunk_db_model.embedding, tidb_vector.DistanceMetric.COSINE\n            )\n\n            logger.info(f\"Chunk table <{table_name}> has been created successfully.\")\n        else:\n            logger.info(\n                f\"Chunk table <{table_name}> is already exists, no action to do.\"\n            )\n\n    def drop_table_schema(self):\n        inspector = sqlalchemy.inspect(engine)\n        table_name = self._chunk_db_model.__tablename__\n\n        if table_name in inspector.get_table_names():\n            self._chunk_db_model.metadata.drop_all(\n                self._session.connection(), tables=[self._chunk_db_model.__table__]\n            )\n            logger.info(f\"Chunk table <{table_name}> has been dropped successfully.\")\n        else:\n            logger.info(f\"Chunk table <{table_name}> is not existed, not action to do.\")\n\n    def close_session(self) -> None:\n        # Always call this method is necessary to make sure the session is closed\n        if self._owns_session:\n            self._session.close()\n\n    @property\n    def client(self) -> Any:\n        \"\"\"Get client.\"\"\"\n        return engine\n\n 
   @classmethod\n    def class_name(cls) -> str:\n        return \"TiDBVectorStore\"\n\n    def add(self, nodes: List[BaseNode], **add_kwargs: Any) -> List[str]:\n        \"\"\"\n        Add nodes to the vector store.\n\n        Args:\n            nodes (List[BaseNode]): List of nodes to be added.\n            **add_kwargs: Additional keyword arguments to be passed to the underlying storage.\n\n        Returns:\n            List[str]: List of node IDs that were added.\n        \"\"\"\n        items = []\n        for n in nodes:\n            items.append(\n                {\n                    \"id\": n.node_id,\n                    \"hash\": n.hash,\n                    \"text\": n.get_content(metadata_mode=MetadataMode.NONE),\n                    \"meta\": node_to_metadata_dict(n, remove_text=True),\n                    \"embedding\": n.get_embedding(),\n                    \"document_id\": n.ref_doc_id,\n                    \"relations\": node_to_relation_dict(n),\n                    \"source_uri\": add_kwargs.get(\"source_uri\"),\n                }\n            )\n\n        self._session.bulk_insert_mappings(self._chunk_db_model, items)\n        self._session.commit()\n        return [i[\"id\"] for i in items]\n\n    def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:\n        \"\"\"\n        Delete all nodes of a document from the vector store.\n\n        Args:\n            ref_doc_id (str): The reference document ID to be deleted.\n            **delete_kwargs: Additional keyword arguments to be passed to the delete method.\n\n        Returns:\n            None\n        \"\"\"\n        assert ref_doc_id.isdigit(), \"ref_doc_id must be an integer.\"\n        delete_stmt = delete(self._chunk_db_model).where(\n            self._chunk_db_model.document_id == int(ref_doc_id)\n        )\n        self._session.exec(delete_stmt)\n        self._session.commit()\n\n    def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult:\n     
   \"\"\"\n        Perform a similarity search with the given query embedding.\n\n        Args:\n            query (VectorStoreQuery): The query object containing the query data.\n            **kwargs: Additional keyword arguments.\n\n        Returns:\n            VectorStoreQueryResult: The result of the similarity search.\n\n        Raises:\n            ValueError: If the query embedding is not provided.\n        \"\"\"\n        # TODO:\n        # - Support advanced query filters\n        # - Support both pre-filter and post-filter\n        if query.query_embedding is None:\n            raise ValueError(\"Query embedding must be provided.\")\n\n        subquery = select(\n            self._chunk_db_model.id,\n            self._chunk_db_model.text,\n            self._chunk_db_model.meta,\n            self._chunk_db_model.document_id,\n            self._chunk_db_model.embedding.cosine_distance(query.query_embedding).label(\n                \"distance\"\n            ),\n        )\n\n        if query.filters:\n            for f in query.filters.filters:\n                subquery = subquery.stmt(self._chunk_db_model.meta[f.key] == f.value)\n\n        sub = alias(\n            subquery.order_by(asc(\"distance\"))\n            .limit(query.similarity_top_k * self._oversampling_factor)\n            .subquery(),\n            \"sub\",\n        )\n        stmt = (\n            select(\n                sub.c.id,\n                sub.c.text,\n                sub.c.meta,\n                sub.c.document_id,\n                sub.c.distance,\n            )\n            .order_by(asc(\"distance\"))\n            .limit(query.similarity_top_k)\n        )\n        results = self._session.exec(stmt)\n\n        nodes = []\n        similarities = []\n        ids = []\n        for row in results:\n            # Check if metadata contains required fields for node reconstruction\n            # to avoid async event loop issues in metadata_dict_to_node\n            if (\n                
isinstance(row.meta, dict)\n                and \"_node_content\" in row.meta\n                and \"_node_type\" in row.meta\n            ):\n                try:\n                    node = metadata_dict_to_node(row.meta)\n                    node.id_ = str(row.id)\n                    node.metadata[\"document_id\"] = row.document_id\n                    node.set_content(row.text)\n                except Exception as e:\n                    # NOTE: deprecated legacy logic for backward compatibility\n                    logger.warning(\n                        f\"Failed to parse metadata dict (error: {e}), falling back to legacy logic.\",\n                        exc_info=True,\n                    )\n                    node = TextNode(\n                        id_=str(row.id),\n                        text=row.text,\n                        metadata=row.meta,\n                    )\n            else:\n                # Use legacy logic directly if metadata doesn't contain required fields\n                node = TextNode(\n                    id_=str(row.id),\n                    text=row.text,\n                    metadata=row.meta,\n                )\n            similarities.append((1 - row.distance) if row.distance is not None else 0)\n            ids.append(str(row.id))\n            nodes.append(node)\n        return VectorStoreQueryResult(\n            nodes=nodes,\n            similarities=similarities,\n            ids=ids,\n        )\n"
  },
  {
    "path": "backend/app/rag/knowledge_base/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/knowledge_base/config.py",
    "content": "import logging\n\nfrom llama_index.core.base.embeddings.base import BaseEmbedding\nfrom sqlmodel import Session\n\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.rag.llms.resolver import get_default_llm, resolve_llm\nfrom app.rag.embeddings.resolver import resolve_embed_model, get_default_embed_model\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef get_kb_llm(session: Session, kb: KnowledgeBase):\n    db_llm = kb.llm\n    if db_llm:\n        return resolve_llm(\n            db_llm.provider, db_llm.model, db_llm.config, db_llm.credentials\n        )\n    else:\n        return get_default_llm(session)\n\n\ndef get_kb_dspy_llm(session: Session, kb: KnowledgeBase):\n    llm = get_kb_llm(session, kb)\n    return get_dspy_lm_by_llama_llm(llm)\n\n\ndef get_kb_embed_model(session: Session, kb: KnowledgeBase) -> BaseEmbedding:\n    db_embed_model = kb.embedding_model\n    if db_embed_model:\n        return resolve_embed_model(\n            db_embed_model.provider,\n            db_embed_model.model,\n            db_embed_model.config,\n            db_embed_model.credentials,\n        )\n    else:\n        return get_default_embed_model(session)\n"
  },
  {
    "path": "backend/app/rag/knowledge_base/index_store.py",
    "content": "from sqlalchemy import inspection\nfrom sqlmodel import Session\n\nfrom app.models import KnowledgeBase\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.models.entity import get_kb_entity_model\nfrom app.rag.knowledge_base.config import get_kb_dspy_llm, get_kb_embed_model\nfrom app.models.relationship import get_kb_relationship_model\nfrom app.rag.indices.knowledge_graph.graph_store import TiDBGraphStore, TiDBGraphEditor\nfrom app.rag.indices.vector_search.vector_store.tidb_vector_store import TiDBVectorStore\n\n\ndef get_kb_tidb_vector_store(session: Session, kb: KnowledgeBase) -> TiDBVectorStore:\n    chunk_model = get_kb_chunk_model(kb)\n    vector_store = TiDBVectorStore(chunk_model, session=session)\n    return vector_store\n\n\ndef init_kb_tidb_vector_store(session: Session, kb: KnowledgeBase) -> TiDBVectorStore:\n    vector_store = get_kb_tidb_vector_store(session, kb)\n    vector_store.ensure_table_schema()\n    return vector_store\n\n\ndef get_kb_tidb_graph_store(session: Session, kb: KnowledgeBase) -> TiDBGraphStore:\n    dspy_lm = get_kb_dspy_llm(session, kb)\n    embed_model = get_kb_embed_model(session, kb)\n    entity_model = get_kb_entity_model(kb)\n    relationship_model = get_kb_relationship_model(kb)\n    inspection.inspect(relationship_model)\n    chunk_model = get_kb_chunk_model(kb)\n\n    graph_store = TiDBGraphStore(\n        knowledge_base=kb,\n        dspy_lm=dspy_lm,\n        session=session,\n        embed_model=embed_model,\n        entity_db_model=entity_model,\n        relationship_db_model=relationship_model,\n        chunk_db_model=chunk_model,\n    )\n    return graph_store\n\n\ndef init_kb_tidb_graph_store(session: Session, kb: KnowledgeBase) -> TiDBGraphStore:\n    graph_store = get_kb_tidb_graph_store(session, kb)\n    graph_store.ensure_table_schema()\n    return graph_store\n\n\ndef get_kb_tidb_graph_editor(session: Session, kb: KnowledgeBase) -> TiDBGraphEditor:\n    entity_db_model = 
get_kb_entity_model(kb)\n    relationship_db_model = get_kb_relationship_model(kb)\n    embed_model = get_kb_embed_model(session, kb)\n    return TiDBGraphEditor(\n        knowledge_base_id=kb.id,\n        entity_db_model=entity_db_model,\n        relationship_db_model=relationship_db_model,\n        embed_model=embed_model,\n    )\n"
  },
  {
    "path": "backend/app/rag/knowledge_base/schema.py",
    "content": "from enum import Enum\n\n\nclass KBIndexType(str, Enum):\n    VECTOR_SEARCH = \"VECTOR_SEARCH\"\n    KNOWLEDGE_GRAPH = \"KNOWLEDGE_GRAPH\"\n"
  },
  {
    "path": "backend/app/rag/llms/dspy.py",
    "content": "import dspy\n\nfrom llama_index.core.base.llms.base import BaseLLM\n\n\ndef get_dspy_lm_by_llama_llm(llama_llm: BaseLLM) -> dspy.LM:\n    \"\"\"\n    Get the dspy LM by the llama LLM.\n\n    In this project, we use both llama-index and dspy, both of them have their own LLM implementation.\n    This function can help us reduce the complexity of the code by converting the llama LLM to the dspy LLM.\n    \"\"\"\n    match llama_llm.class_name():\n        case \"openai_llm\":\n            return dspy.LM(\n                model=f\"openai/{llama_llm.model}\",\n                max_tokens=llama_llm.max_tokens,\n                api_key=llama_llm.api_key,\n                api_base=enforce_trailing_slash(llama_llm.api_base),\n                num_retries=3,\n            )\n        case \"OpenAILike\":\n            return dspy.LM(\n                model=f\"openai/{llama_llm.model}\",\n                max_tokens=llama_llm.max_tokens,\n                api_key=llama_llm.api_key,\n                api_base=enforce_trailing_slash(llama_llm.api_base),\n                model_type=\"chat\" if llama_llm.is_chat_model else \"text\",\n                num_retries=3,\n            )\n        case \"GenAI\":\n            if \"models/\" in llama_llm.model:\n                # For Gemini\n                model_name = llama_llm.model.split(\"models/\")[1]\n                return dspy.LM(\n                    model=f\"gemini/{model_name}\",\n                    max_tokens=llama_llm._max_tokens,\n                    api_key=llama_llm._client._api_client.api_key,\n                )\n            else:\n                # For Vertex AI\n                return dspy.LM(\n                    model=f\"vertex_ai/{llama_llm.model}\",\n                    max_tokens=llama_llm._max_tokens,\n                    context_window=llama_llm.context_window,\n                    temperature=llama_llm.temperature,\n                    vertex_location=llama_llm._location,\n                    
vertex_credentials=llama_llm._credentials,\n                )\n        case \"Bedrock_Converse_LLM\":\n            return dspy.LM(\n                model=f\"bedrock/converse/{llama_llm.model}\",\n                # Notice: Bedrock's default max_tokens is 512, which is too small for the application.\n                max_tokens=llama_llm.max_tokens or 8192,\n                aws_access_key_id=llama_llm.aws_access_key_id,\n                aws_secret_access_key=llama_llm.aws_secret_access_key,\n                aws_region_name=llama_llm.region_name,\n            )\n        case \"Ollama_llm\":\n            return dspy.LM(\n                model=f\"ollama_chat/{llama_llm.model}\",\n                api_base=llama_llm.base_url,\n                timeout=llama_llm.request_timeout,\n                temperature=llama_llm.temperature,\n                num_retries=3,\n            )\n        case \"azure_openai_llm\":\n            return dspy.LM(\n                model=f\"azure/{llama_llm.model}\",\n                max_tokens=llama_llm.max_tokens,\n                temperature=llama_llm.temperature,\n                api_key=llama_llm.api_key,\n                api_base=enforce_trailing_slash(llama_llm.azure_endpoint),\n                api_version=llama_llm.api_version,\n                deployment_id=llama_llm.engine,\n            )\n        case _:\n            raise ValueError(f\"Got unknown LLM provider: {llama_llm.class_name()}\")\n\n\ndef enforce_trailing_slash(url: str):\n    if url.endswith(\"/\"):\n        return url\n    return url + \"/\"\n"
  },
  {
    "path": "backend/app/rag/llms/provider.py",
    "content": "import enum\n\nfrom typing import List\nfrom pydantic import BaseModel\n\n\nclass LLMProvider(str, enum.Enum):\n    OPENAI = \"openai\"\n    GEMINI = \"gemini\"\n    VERTEX_AI = \"vertex_ai\"\n    ANTHROPIC_VERTEX = \"anthropic_vertex\"  # Deprecated, use VERTEX_AI instead\n    OPENAI_LIKE = \"openai_like\"\n    BEDROCK = \"bedrock\"\n    OLLAMA = \"ollama\"\n    GITEEAI = \"giteeai\"\n    AZURE_OPENAI = \"azure_openai\"\n\n\nclass LLMProviderOption(BaseModel):\n    provider: LLMProvider\n    provider_display_name: str | None = None\n    provider_description: str | None = None\n    provider_url: str | None = None\n    default_llm_model: str\n    llm_model_description: str\n    default_config: dict = {}\n    config_description: str = \"\"\n    default_credentials: str | dict = \"\"\n    credentials_display_name: str\n    credentials_description: str\n    credentials_type: str = \"str\"\n\n\nllm_provider_options: List[LLMProviderOption] = [\n    LLMProviderOption(\n        provider=LLMProvider.OPENAI,\n        provider_display_name=\"OpenAI\",\n        provider_description=\"The OpenAI API provides a simple interface for developers to create an intelligence layer in their applications, powered by OpenAI's state of the art models.\",\n        provider_url=\"https://platform.openai.com\",\n        default_llm_model=\"gpt-4o\",\n        llm_model_description=\"\",\n        credentials_display_name=\"OpenAI API Key\",\n        credentials_description=\"The API key of OpenAI, you can find it in https://platform.openai.com/api-keys\",\n        credentials_type=\"str\",\n        default_credentials=\"sk-****\",\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.OPENAI_LIKE,\n        provider_display_name=\"OpenAI Like\",\n        default_llm_model=\"\",\n        llm_model_description=\"\",\n        default_config={\n            \"api_base\": \"https://openrouter.ai/api/v1/\",\n            \"is_chat_model\": True,\n        },\n        
config_description=(\n            \"`api_base` is the API base URL of the third-party OpenAI-like service, such as OpenRouter; \"\n            \"`is_chat_model` indicates whether the model is a chat model; \"\n            \"`context_window` is the maximum number of input tokens and output tokens; \"\n        ),\n        credentials_display_name=\"API Key\",\n        credentials_description=\"The API key of the third-party OpenAI-like service, such as OpenRouter, you can find it in their official website\",\n        credentials_type=\"str\",\n        default_credentials=\"sk-****\",\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.GEMINI,\n        provider_display_name=\"Gemini\",\n        provider_description=\"The Gemini API and Google AI Studio help you start working with Google's latest models. Access the whole Gemini model family and turn your ideas into real applications that scale.\",\n        provider_url=\"https://ai.google.dev/gemini-api\",\n        default_llm_model=\"models/gemini-2.0-flash\",\n        llm_model_description=\"Find the model code at https://ai.google.dev/gemini-api/docs/models/gemini\",\n        credentials_display_name=\"Google API Key\",\n        credentials_description=\"The API key of Google AI Studio, you can find it in https://aistudio.google.com/app/apikey\",\n        credentials_type=\"str\",\n        default_credentials=\"AIza****\",\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.VERTEX_AI,\n        provider_display_name=\"Vertex AI\",\n        provider_description=\"Vertex AI is a fully-managed, unified AI development platform for building and using generative AI.\",\n        provider_url=\"https://cloud.google.com/vertex-ai\",\n        default_llm_model=\"gemini-2.5-flash\",\n        llm_model_description=\"Find more in https://cloud.google.com/model-garden\",\n        credentials_display_name=\"Google Credentials JSON\",\n        credentials_description=\"The JSON Object of Google Credentials, refer 
to https://cloud.google.com/docs/authentication/provide-credentials-adc#on-prem\",\n        credentials_type=\"dict\",\n        default_credentials={\n            \"type\": \"service_account\",\n            \"project_id\": \"****\",\n            \"private_key_id\": \"****\",\n        },\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.OLLAMA,\n        provider_display_name=\"Ollama\",\n        provider_description=\"Ollama is a lightweight framework for building and running large language models.\",\n        provider_url=\"https://ollama.com\",\n        default_llm_model=\"llama3.2\",\n        llm_model_description=\"Find more in https://ollama.com/library\",\n        default_config={\n            \"base_url\": \"http://localhost:11434\",\n            \"context_window\": 8192,\n            \"request_timeout\": 60 * 10,\n        },\n        config_description=(\n            \"`base_url` is the base URL of the Ollama server, ensure it can be accessed from this server; \"\n            \"`context_window` is the maximum number of input tokens and output tokens; \"\n            \"`request_timeout` is the maximum time to wait for a generate response.\"\n        ),\n        credentials_display_name=\"Ollama API Key\",\n        credentials_description=\"Ollama doesn't require an API key; setting a dummy string here is OK\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.GITEEAI,\n        provider_display_name=\"Gitee AI\",\n        provider_description=\"Gitee AI is a third-party model provider that offers ready-to-use cutting-edge model APIs for AI developers.\",\n        provider_url=\"https://ai.gitee.com\",\n        default_llm_model=\"Qwen2.5-72B-Instruct\",\n        default_config={\n            \"is_chat_model\": True,\n            \"context_window\": 131072,\n        },\n        config_description=(\n            \"`is_chat_model` indicates whether the model is a chat 
model; \"\n            \"`context_window` is the maximum number of input tokens and output tokens; \"\n        ),\n        llm_model_description=\"Find more in https://ai.gitee.com/serverless-api\",\n        credentials_display_name=\"Gitee AI API Key\",\n        credentials_description=\"The API key of Gitee AI, you can find it in https://ai.gitee.com/dashboard/settings/tokens\",\n        credentials_type=\"str\",\n        default_credentials=\"****\",\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.BEDROCK,\n        provider_display_name=\"Bedrock\",\n        provider_description=\"Amazon Bedrock is a fully managed foundation models service.\",\n        provider_url=\"https://docs.aws.amazon.com/bedrock/\",\n        default_llm_model=\"anthropic.claude-3-7-sonnet-20250219-v1:0\",\n        llm_model_description=\"\",\n        credentials_display_name=\"AWS Bedrock Credentials JSON\",\n        credentials_description=\"The JSON Object of AWS Credentials, refer to https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global\",\n        credentials_type=\"dict\",\n        default_credentials={\n            \"aws_access_key_id\": \"****\",\n            \"aws_secret_access_key\": \"****\",\n            \"aws_region_name\": \"us-west-2\",\n        },\n    ),\n    LLMProviderOption(\n        provider=LLMProvider.AZURE_OPENAI,\n        provider_display_name=\"Azure OpenAI\",\n        provider_description=\"Azure OpenAI is a cloud-based AI service that provides access to OpenAI's advanced language models.\",\n        provider_url=\"https://azure.microsoft.com/en-us/products/ai-services/openai-service\",\n        default_llm_model=\"gpt-4o\",\n        llm_model_description=\"\",\n        config_description=\"Refer to this document https://learn.microsoft.com/en-us/azure/ai-services/openai/quickstart for more information about the Azure OpenAI API.\",\n        default_config={\n            \"azure_endpoint\": 
\"https://<your-resource-name>.openai.azure.com/\",\n            \"api_version\": \"<your-api-version>\",\n            \"engine\": \"<your-deployment-name>\",\n        },\n        credentials_display_name=\"Azure OpenAI API Key\",\n        credentials_description=\"The API key of Azure OpenAI\",\n        credentials_type=\"str\",\n        default_credentials=\"****\",\n    ),\n]\n"
  },
  {
    "path": "backend/app/rag/llms/resolver.py",
    "content": "from typing import Optional\n\nfrom llama_index.core.llms.llm import LLM\nfrom sqlmodel import Session\n\nfrom app.repositories.llm import llm_repo\nfrom app.rag.llms.provider import LLMProvider\n\n\ndef resolve_llm(\n    provider: LLMProvider,\n    model: str,\n    config: Optional[dict] = {},\n    credentials: Optional[str | list | dict] = None,\n) -> LLM:\n    match provider:\n        case LLMProvider.OPENAI:\n            from llama_index.llms.openai import OpenAI\n\n            return OpenAI(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case LLMProvider.OPENAI_LIKE:\n            from llama_index.llms.openai_like import OpenAILike\n\n            config.setdefault(\"is_chat_model\", True)\n            config.setdefault(\"context_window\", 200 * 1000)\n            return OpenAILike(model=model, api_key=credentials, **config)\n        case LLMProvider.BEDROCK:\n            from llama_index.llms.bedrock_converse import BedrockConverse\n\n            access_key_id = credentials[\"aws_access_key_id\"]\n            secret_access_key = credentials[\"aws_secret_access_key\"]\n            region_name = credentials[\"aws_region_name\"]\n\n            config.setdefault(\"max_tokens\", 4096)\n\n            return BedrockConverse(\n                model=model,\n                aws_access_key_id=access_key_id,\n                aws_secret_access_key=secret_access_key,\n                region_name=region_name,\n                **config,\n            )\n        case LLMProvider.GEMINI:\n            from llama_index.llms.google_genai import GoogleGenAI\n\n            return GoogleGenAI(model=model, api_key=credentials, **config)\n        case LLMProvider.VERTEX_AI | LLMProvider.ANTHROPIC_VERTEX:\n            from llama_index.llms.google_genai import GoogleGenAI\n            from llama_index.llms.google_genai.base import VertexAIConfig\n            from google.oauth2 import service_account\n   
         from google.auth.transport.requests import Request\n\n            google_creds: service_account.Credentials = (\n                service_account.Credentials.from_service_account_info(\n                    credentials,\n                    scopes=[\"https://www.googleapis.com/auth/cloud-platform\"],\n                )\n            )\n            google_creds.refresh(request=Request())\n            project = credentials.get(\"project_id\") or config.get(\"project\")\n            location = config.get(\"location\", \"us-west1\")\n\n            llm = GoogleGenAI(\n                model=model,\n                vertexai_config=VertexAIConfig(\n                    credentials=google_creds, project=project, location=location\n                ),\n                **config,\n            )\n\n            llm._project = project\n            llm._location = location\n            llm._credentials = credentials\n            return llm\n        case LLMProvider.GITEEAI:\n            from llama_index.llms.openai_like import OpenAILike\n\n            config.setdefault(\"is_chat_model\", True)\n            config.setdefault(\"context_window\", 128 * 1024)\n            return OpenAILike(\n                model=model,\n                api_base=\"https://ai.gitee.com/v1\",\n                api_key=credentials,\n                **config,\n            )\n        case LLMProvider.AZURE_OPENAI:\n            from llama_index.llms.azure_openai import AzureOpenAI\n\n            return AzureOpenAI(\n                model=model,\n                api_key=credentials,\n                **config,\n            )\n        case LLMProvider.OLLAMA:\n            from llama_index.llms.ollama import Ollama\n\n            config.setdefault(\"request_timeout\", 60 * 10)\n            config.setdefault(\"context_window\", 8192)\n            return Ollama(model=model, **config)\n        case _:\n            raise ValueError(f\"Got unknown LLM provider: {provider}\")\n\n\ndef get_llm_by_id(session: 
Session, llm_id: int) -> Optional[LLM]:\n    db_llm = llm_repo.get(session, llm_id)\n    if not db_llm:\n        return None\n    return resolve_llm(\n        db_llm.provider,\n        db_llm.model,\n        db_llm.config,\n        db_llm.credentials,\n    )\n\n\ndef must_get_llm_by_id(session: Session, llm_id: int) -> LLM:\n    db_llm = llm_repo.must_get(session, llm_id)\n    return resolve_llm(\n        db_llm.provider,\n        db_llm.model,\n        db_llm.config,\n        db_llm.credentials,\n    )\n\n\ndef get_default_llm(session: Session) -> Optional[LLM]:\n    db_llm = llm_repo.get_default(session)\n    if not db_llm:\n        return None\n    return resolve_llm(\n        db_llm.provider,\n        db_llm.model,\n        db_llm.config,\n        db_llm.credentials,\n    )\n\n\ndef must_get_default_llm(session: Session) -> LLM:\n    db_llm = llm_repo.must_get_default(session)\n    return resolve_llm(\n        db_llm.provider,\n        db_llm.model,\n        db_llm.config,\n        db_llm.credentials,\n    )\n\n\ndef get_llm_or_default(session: Session, llm_id: Optional[int]) -> LLM:\n    if llm_id is None:\n        return must_get_default_llm(session)\n    else:\n        return must_get_llm_by_id(session, llm_id)\n"
  },
  {
    "path": "backend/app/rag/node_parser/__init__.py",
    "content": "from .file.markdown import MarkdownNodeParser\n\n__all__ = [\"MarkdownNodeParser\"]\n"
  },
  {
    "path": "backend/app/rag/node_parser/file/markdown.py",
    "content": "import re\nfrom typing import Any, Dict, List, Optional, Sequence, Callable\n\nfrom llama_index.core.callbacks.base import CallbackManager\nfrom llama_index.core.node_parser import SentenceSplitter\nfrom llama_index.core.node_parser.interface import NodeParser\nfrom llama_index.core.node_parser.node_utils import build_nodes_from_splits\nfrom llama_index.core.schema import BaseNode, MetadataMode, TextNode\nfrom llama_index.core.utils import get_tqdm_iterable, get_tokenizer\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\n\n\nDEFAULT_CHUNK_HEADER_LEVEL = 2\nDEFAULT_CHUNK_SIZE = 1200\n\n\nclass MarkdownNodeParser(NodeParser):\n    \"\"\"Markdown node parser.\n    Splits a document into Nodes using custom Markdown splitting logic.\n    Args:\n        include_metadata (bool): whether to include metadata in nodes\n        include_prev_next_rel (bool): whether to include prev/next relationships\n    \"\"\"\n\n    chunk_size: int = Field(\n        default=DEFAULT_CHUNK_SIZE,\n        description=\"The token chunk size for each chunk.\",\n        gt=0,\n    )\n    chunk_header_level: int = Field(\n        default=DEFAULT_CHUNK_HEADER_LEVEL,\n        description=\"The header level to split on\",\n        ge=1,\n        le=6,\n    )\n    _tokenizer: Callable = PrivateAttr()\n\n    def __init__(\n        self,\n        chunk_header_level: int = DEFAULT_CHUNK_HEADER_LEVEL,\n        chunk_size: int = DEFAULT_CHUNK_SIZE,\n        tokenizer: Optional[Callable] = None,\n        include_metadata: bool = True,\n        include_prev_next_rel: bool = True,\n        callback_manager: Optional[CallbackManager] = None,\n    ):\n        super().__init__(\n            chunk_header_level=chunk_header_level,\n            chunk_size=chunk_size,\n            include_metadata=include_metadata,\n            include_prev_next_rel=include_prev_next_rel,\n            callback_manager=callback_manager,\n        )\n        self.callback_manager = callback_manager or 
CallbackManager([])\n        self._tokenizer = tokenizer or get_tokenizer()\n\n    @classmethod\n    def from_defaults(\n        cls,\n        chunk_header_level: int = DEFAULT_CHUNK_HEADER_LEVEL,\n        chunk_size: int = DEFAULT_CHUNK_SIZE,\n        tokenizer: Optional[Callable] = None,\n        include_metadata: bool = True,\n        include_prev_next_rel: bool = True,\n        callback_manager: Optional[CallbackManager] = None,\n    ) -> \"MarkdownNodeParser\":\n        callback_manager = callback_manager or CallbackManager([])\n        tokenizer = tokenizer or get_tokenizer()\n        return cls(\n            chunk_header_level=chunk_header_level,\n            chunk_size=chunk_size,\n            tokenizer=tokenizer,\n            include_metadata=include_metadata,\n            include_prev_next_rel=include_prev_next_rel,\n            callback_manager=callback_manager,\n        )\n\n    @classmethod\n    def class_name(cls) -> str:\n        \"\"\"Get class name.\"\"\"\n        return \"MarkdownNodeParser\"\n\n    def _parse_nodes(\n        self,\n        nodes: Sequence[BaseNode],\n        show_progress: bool = False,\n        **kwargs: Any,\n    ) -> List[BaseNode]:\n        all_nodes: List[BaseNode] = []\n        nodes_with_progress = get_tqdm_iterable(nodes, show_progress, \"Parsing nodes\")\n\n        for node in nodes_with_progress:\n            splitted_nodes = self.get_nodes_from_node(\n                node,\n                self.chunk_header_level,\n                self.chunk_size * 0.7,\n                self.chunk_size * 1.1,\n            )\n            nodes = []\n            for sn in splitted_nodes:\n                header_level = sn.metadata.get(\"Header_Level\")\n                if header_level:\n                    for _hl in range(1, header_level + 1)[::-1]:\n                        if (\n                            f\"Header_{_hl}\" in sn.metadata\n                            and sn.metadata[f\"Header_{_hl}\"] not in sn.text\n                   
     ):\n                            sn.text = (\n                                f\"{'#' * _hl} {sn.metadata[f'Header_{_hl}']}\\n\\n\"\n                                + sn.text\n                            )\n                n = build_nodes_from_splits([sn.text], node, id_func=self.id_func)[0]\n                if self.include_metadata:\n                    n.metadata = {**node.metadata, **sn.metadata}\n                nodes.append(n)\n            all_nodes.extend(nodes)\n\n        return all_nodes\n\n    def get_nodes_from_node(\n        self,\n        node: BaseNode,\n        chunk_header_level: int,\n        chunk_size_small_threshold: float,\n        chunk_size_large_threshold: float,\n    ) -> List[TextNode]:\n        # print(chunk_header_level, chunk_size_small_threshold, chunk_size_large_threshold)\n        \"\"\"Get nodes from document.\"\"\"\n        text = node.get_content(metadata_mode=MetadataMode.NONE)\n        markdown_nodes = []\n        lines = text.split(\"\\n\")\n        metadata: Dict[str, str] = node.metadata\n        code_block = False\n        current_section = \"\"\n        first_header = True\n\n        for line in lines:\n            if line.lstrip().startswith(\"```\"):\n                code_block = not code_block\n            header_match = re.match(r\"^(#+)\\s(.*)\", line)\n            if header_match and not code_block:\n                current_header_level = len(header_match.group(1).strip())\n                if current_section != \"\" and current_header_level == chunk_header_level:\n                    if first_header:\n                        # skip the first header, merge it with the first section (usually the title of the document)\n                        first_header = False\n                    else:\n                        markdown_nodes.append(\n                            self._build_node_from_split(\n                                current_section.strip(), node, metadata\n                            )\n                     
   )\n                        current_section = \"\"\n                if current_header_level <= chunk_header_level:\n                    metadata = self._update_metadata(\n                        metadata, header_match.group(2), current_header_level\n                    )\n                current_section += line + \"\\n\"\n            else:\n                current_section += line + \"\\n\"\n\n        markdown_nodes.append(\n            self._build_node_from_split(current_section.strip(), node, metadata)\n        )\n        return self._normalize_node_sizes(\n            markdown_nodes, chunk_size_small_threshold, chunk_size_large_threshold\n        )\n\n    def _normalize_node_sizes(\n        self,\n        nodes: List[TextNode],\n        chunk_size_small_threshold: float,\n        chunk_size_large_threshold: float,\n    ) -> List[TextNode]:\n        # 1. Split the big node into multiple small nodes\n        # 2. Merge the small nodes into a big node if they are too small\n        # 3. 
Make all the nodes as much as possible close to the chunk size\n        nodes_token_size = [self._token_size(node.text) for node in nodes]\n        normalized_nodes = []\n        buffer = []\n        node_count = len(nodes)\n        i = 0\n\n        while i < node_count:\n            node = nodes[i]\n            this_chunk_size = nodes_token_size[i]\n            if this_chunk_size < chunk_size_small_threshold:\n                # if the last node is too small, merge it with the previous one\n                if (\n                    not buffer\n                    and i == (node_count - 1)\n                    and i > 0\n                    and nodes_token_size[i - 1] + this_chunk_size\n                    < chunk_size_large_threshold\n                ):\n                    normalized_nodes[-1].text += \"\\n\\n\" + node.text\n                    i += 1\n                    continue\n                buffer.append(this_chunk_size)\n                total = sum(buffer)\n                while (\n                    (i + 1) < node_count\n                    and nodes_token_size[i + 1] < self.chunk_size\n                    and total + nodes_token_size[i + 1] <= chunk_size_large_threshold\n                ):\n                    i += 1\n                    buffer.append(nodes_token_size[i])\n                    total += nodes_token_size[i]\n                # output the sum of the buffer\n                buffer_nodes = nodes[i - len(buffer) + 1 : i + 1]\n                normalized_nodes.append(\n                    TextNode(\n                        text=\"\\n\\n\".join([node.text for node in buffer_nodes]),\n                        metadata=buffer_nodes[0].metadata,\n                    )\n                )\n                i += 1\n                buffer.clear()\n            elif this_chunk_size > chunk_size_large_threshold:\n                # split into multiple nodes with next header level and bigger chunk size\n                md_splitted_nodes = 
self.get_nodes_from_node(\n                    node,\n                    self.chunk_header_level + 1,\n                    chunk_size_small_threshold,\n                    chunk_size_large_threshold * 1.1,\n                )\n                for n in md_splitted_nodes:\n                    _chunk_size = self._token_size(n.text)\n                    if _chunk_size > chunk_size_large_threshold * 1.1:\n                        # using sentence splitter to split the node if it's still too large\n                        sentence_splitted_nodes = SentenceSplitter(\n                            chunk_size=int(chunk_size_large_threshold), separator=\"\\n\\n\"\n                        ).get_nodes_from_documents([n])\n                        normalized_nodes.extend(sentence_splitted_nodes)\n                    else:\n                        normalized_nodes.append(n)\n                i += 1\n            else:\n                normalized_nodes.append(node)\n                i += 1\n        return normalized_nodes\n\n    def _update_metadata(\n        self, headers_metadata: dict, new_header: str, new_header_level: int\n    ) -> dict:\n        \"\"\"Update the markdown headers for metadata.\n        Removes all headers that are equal or less than the level\n        of the newly found header\n        \"\"\"\n        updated_headers = {}\n\n        for i in range(1, new_header_level):\n            key = f\"Header_{i}\"\n            if key in headers_metadata:\n                updated_headers[key] = headers_metadata[key]\n\n        updated_headers[f\"Header_{new_header_level}\"] = new_header\n        updated_headers[\"Header_Level\"] = new_header_level\n        return updated_headers\n\n    def _build_node_from_split(\n        self,\n        text_split: str,\n        node: BaseNode,\n        metadata: dict,\n    ) -> TextNode:\n        \"\"\"Build node from single text split.\"\"\"\n        node = build_nodes_from_splits([text_split], node, id_func=self.id_func)[0]\n\n        if 
self.include_metadata:\n            node.metadata = {**node.metadata, **metadata}\n\n        return node\n\n    def _token_size(self, text: str) -> int:\n        return len(self._tokenizer(text))\n"
  },
  {
    "path": "backend/app/rag/postprocessors/__init__.py",
    "content": "from .metadata_post_filter import MetadataPostFilter, MetadataFilters\n\n__all__ = [\n    \"MetadataPostFilter\",\n    \"MetadataFilters\",\n]\n"
  },
  {
    "path": "backend/app/rag/postprocessors/metadata_post_filter.py",
    "content": "import logging\n\nfrom typing import Dict, List, Optional, Any, Union\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.schema import BaseNode, NodeWithScore\nfrom llama_index.core.vector_stores.types import (\n    MetadataFilter,\n    MetadataFilters,\n    FilterOperator,\n    FilterCondition,\n)\n\n\nSimpleMetadataFilter = Dict[str, Any]\n\n\ndef simple_filter_to_metadata_filters(filters: SimpleMetadataFilter) -> MetadataFilters:\n    simple_filters = []\n    for key, value in filters.items():\n        simple_filters.append(\n            MetadataFilter(\n                key=key,\n                value=value,\n                operator=FilterOperator.EQ,\n            )\n        )\n    return MetadataFilters(filters=simple_filters)\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass MetadataPostFilter(BaseNodePostprocessor):\n    filters: Optional[MetadataFilters] = None\n\n    def __init__(\n        self,\n        filters: Optional[Union[MetadataFilters, SimpleMetadataFilter]] = None,\n        **kwargs: Any,\n    ):\n        super().__init__(**kwargs)\n        if isinstance(filters, MetadataFilters):\n            self.filters = filters\n        else:\n            self.filters = simple_filter_to_metadata_filters(filters)\n\n    def _postprocess_nodes(\n        self,\n        nodes: List[NodeWithScore],\n        query_bundle: Optional[QueryBundle] = None,\n    ) -> List[NodeWithScore]:\n        if self.filters is None:\n            return nodes\n\n        filtered_nodes = []\n        for node in nodes:\n            # TODO: support advanced post filtering.\n            if self.match_all_filters(node.node):\n                filtered_nodes.append(node)\n        return filtered_nodes\n\n    def match_all_filters(self, node: BaseNode) -> bool:\n        if self.filters is None or not isinstance(self.filters, MetadataFilters):\n            return True\n\n        if 
self.filters.condition != FilterCondition.AND:\n            logger.warning(\n                f\"Advanced filtering is not supported yet. \"\n                f\"Filter condition {self.filters.condition} is ignored.\"\n            )\n            return True\n\n        for f in self.filters.filters:\n            if f.key not in node.metadata:\n                return False\n\n            if f.operator is not None and f.operator != FilterOperator.EQ:\n                logger.warning(\n                    f\"Advanced filtering is not supported yet. \"\n                    f\"Filter operator {f.operator} is ignored.\"\n                )\n                return True\n\n            value = node.metadata[f.key]\n            if f.value != value:\n                return False\n\n        return True\n"
  },
  {
    "path": "backend/app/rag/query_dispatcher.py",
    "content": "import logging\nfrom typing import Optional, Sequence\nfrom llama_index.llms.openai import OpenAI\nfrom llama_index.core.tools.types import BaseTool\nfrom llama_index.core.tools import FunctionTool\n\nlogger = logging.getLogger(__name__)\n\nDefaultSystemPrompt = \"\"\"\nYou are a highly skilled customer assistant, responsible for dispatching user questions to the most appropriate tools or resources. Your primary objective is to ensure each user question is handled accurately and efficiently by selecting the best-suited tool for the task.\nFor more complex questions, you should break them down into clear, manageable sub-questions and route each to the relevant tools for individual resolution. It's important to maintain clarity and precision in this process, ensuring that the sub-questions are well-defined and can be resolved independently.\nIf you encounter concepts or entities you are not familiar with, you can break the query down into a sub-question to clarify the specific concept or entity. 
For example, if the query involves “what is the latest version,” you can treat this as a sub-question to better understand the context before proceeding with the solution.\n\"\"\"\n\n\nclass QueryDispatcher:\n    def __init__(self, llm: OpenAI, system_prompt: Optional[str] = None):\n        if system_prompt is None:\n            system_prompt = DefaultSystemPrompt\n\n        self._llm = llm\n        self._llm.system_prompt = system_prompt\n\n    def route(self, query: str, tools: Sequence[\"BaseTool\"]) -> str:\n        response = self._llm.chat_with_tools(\n            tools, query, allow_parallel_tool_calls=True, verbose=True\n        )\n\n        try:\n            tool_calls = self._llm.get_tool_calls_from_response(\n                response, error_on_no_tool_call=True\n            )\n        except Exception as e:\n            logger.exception(e)\n            return f\"An error occurred while processing the query: {query}\"\n\n        return tool_calls\n\n\n# mock the answer process\ndef answer(query: str) -> str:\n    \"\"\"\n    Answer a user query. The query should be simple and straightforward.\n    \"\"\"\n    return f\"I need some time to answer your question: {query}.\"\n\n\nanswer_tool = FunctionTool.from_defaults(fn=answer)\n"
  },
  {
    "path": "backend/app/rag/question_gen/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/question_gen/helpers.py",
    "content": "from typing import List\n\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.base.llms.types import ChatMessage\n\n\ndef get_query_bundle_from_chat(\n    user_question, chat_history: List[ChatMessage]\n) -> QueryBundle:\n    query_str = user_question\n    if len(chat_history) > 0:\n        chat_messages = [\n            f\"{message.role.value}: {message.content}\" for message in chat_history\n        ]\n        query_with_history = (\n            \"++++ Chat History ++++\\n\"\n            + \"\\n\".join(chat_messages)\n            + \"++++ Chat History ++++\\n\"\n        )\n        query_str = query_with_history + \"\\n\\nThen the user asks:\\n\" + user_question\n    return QueryBundle(query_str=query_str)\n"
  },
  {
    "path": "backend/app/rag/question_gen/query_decomposer.py",
    "content": "import logging\nimport dspy\nfrom typing import List, Optional\nfrom pydantic import BaseModel, Field\n\nlogger = logging.getLogger(__name__)\n\n\nclass SubQuestion(BaseModel):\n    \"\"\"Representation of a single step-by-step question extracted from the user query.\"\"\"\n\n    question: str = Field(\n        description=\"A step-by-step question to address the user query.\"\n    )\n    reasoning: str = Field(\n        description=\"The rationale behind the question to explain its relevance.\"\n    )\n\n\nclass SubQuestions(BaseModel):\n    \"\"\"Representation of the user's step-by-step questions extracted from the query.\"\"\"\n\n    questions: List[SubQuestion] = Field(\n        description=\"List of questions representing a plan to address the user query.\"\n    )\n\n\nclass DecomposeQuery(dspy.Signature):\n    \"\"\"You are an expert in knowledge base graph construction, specializing in building comprehensive knowledge graphs.\n    Your current task is to deconstruct the user's query into a series of step-by-step questions.\n\n    ## Instructions:\n\n    1. Dependency Analysis:\n\n        - Analyze the user's query to identify the underlying dependencies and relationships between different components.\n        - Construct a dependency graph that visually represents these relationships.\n\n    2. Question Breakdown: Divide the query into a sequence of step-by-step questions necessary to address the main query comprehensively.\n\n    3. Provide Reasoning: Explain the rationale behind each question.\n\n    4. 
Constraints:\n        - Limit the output to no more than 5 questions to maintain focus and relevance.\n        - Ensure accuracy by reflecting the user's true intentions based on the provided query.\n        - Ground all questions in factual information derived directly from the user's input.\n\n    Please only respond in JSON format.\n    \"\"\"\n\n    query: str = dspy.InputField(\n        desc=\"The query text to extract the user's step-by-step questions.\"\n    )\n    subquestions: SubQuestions = dspy.OutputField(\n        desc=\"Representation of the user's step-by-step questions extracted from the query.\"\n    )\n\n\nclass DecomposeQueryModule(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.prog = dspy.Predict(DecomposeQuery)\n\n    def forward(self, query):\n        with dspy.settings.context(lm=self.dspy_lm):\n            return self.prog(query=query)\n\n\nclass QueryDecomposer:\n    def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[str] = None):\n        self.decompose_query_prog = DecomposeQueryModule(dspy_lm=dspy_lm)\n        if complied_program_path is not None:\n            self.decompose_query_prog.load(complied_program_path)\n\n    def decompose(self, query: str) -> SubQuestions:\n        return self.decompose_query_prog(query=query).subquestions\n"
  },
  {
    "path": "backend/app/rag/rerankers/baisheng/baisheng_reranker.py",
    "content": "from typing import Any, List, Optional\nimport requests\n\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\nfrom llama_index.core.callbacks import CBEventType, EventPayload\nfrom llama_index.core.instrumentation import get_dispatcher\nfrom llama_index.core.instrumentation.events.rerank import (\n    ReRankEndEvent,\n    ReRankStartEvent,\n)\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.schema import MetadataMode, NodeWithScore, QueryBundle\n\ndispatcher = get_dispatcher(__name__)\n\n\nclass BaishengRerank(BaseNodePostprocessor):\n    api_key: str = Field(default=\"\", description=\"API key.\")\n    api_url: str = Field(\n        default=\"http://api.chat.prd.yumc.local/chat/v1/reranker\",\n        description=\"API url.\",\n    )\n    model: str = Field(\n        default=\"bge-reranker-v2-m3\",\n        description=\"The model to use when calling API\",\n    )\n\n    top_n: int = Field(description=\"Top N nodes to return.\")\n\n    _session: Any = PrivateAttr()\n\n    def __init__(\n        self,\n        top_n: int = 2,\n        model: str = \"bge-reranker-v2-m3\",\n        api_key: str = \"\",\n        api_url: str = \"http://api.chat.prd.yumc.local/chat/v1/reranker\",\n    ):\n        super().__init__(top_n=top_n, model=model)\n        self.api_key = api_key\n        self.api_url = api_url\n        self.model = model\n        self._session = requests.Session()\n        self._session.headers.update({\"Authorization\": f\"Bearer {self.api_key}\"})\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"BaishengRerank\"\n\n    def _postprocess_nodes(\n        self,\n        nodes: List[NodeWithScore],\n        query_bundle: Optional[QueryBundle] = None,\n    ) -> List[NodeWithScore]:\n        dispatcher.event(\n            ReRankStartEvent(\n                query=query_bundle,\n                nodes=nodes,\n                top_n=self.top_n,\n                
model_name=self.model,\n            )\n        )\n\n        if query_bundle is None:\n            raise ValueError(\"Missing query bundle in extra info.\")\n        if len(nodes) == 0:\n            return []\n\n        with self.callback_manager.event(\n            CBEventType.RERANKING,\n            payload={\n                EventPayload.NODES: nodes,\n                EventPayload.MODEL_NAME: self.model,\n                EventPayload.QUERY_STR: query_bundle.query_str,\n                EventPayload.TOP_K: self.top_n,\n            },\n        ) as event:\n            texts = [\n                node.node.get_content(metadata_mode=MetadataMode.EMBED)\n                for node in nodes\n            ]\n            resp = self._session.post(  # type: ignore\n                self.api_url,\n                json={\n                    \"query\": query_bundle.query_str,\n                    \"model\": self.model,\n                    \"sentences\": texts,\n                },\n            ).json()\n            if \"scores\" not in resp:\n                raise RuntimeError(f\"Got error from reranker: {resp}\")\n\n            results = zip(range(len(nodes)), resp[\"scores\"])\n            results = sorted(results, key=lambda x: x[1], reverse=True)[: self.top_n]\n\n            new_nodes = []\n            for result in results:\n                new_node_with_score = NodeWithScore(\n                    node=nodes[result[0]].node, score=result[1]\n                )\n                new_nodes.append(new_node_with_score)\n            event.on_end(payload={EventPayload.NODES: new_nodes})\n\n        dispatcher.event(ReRankEndEvent(nodes=new_nodes))\n        return new_nodes\n"
  },
  {
    "path": "backend/app/rag/rerankers/local/local_reranker.py",
    "content": "from typing import Any, List, Optional\nimport requests\n\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\nfrom llama_index.core.callbacks import CBEventType, EventPayload\nfrom llama_index.core.instrumentation import get_dispatcher\nfrom llama_index.core.instrumentation.events.rerank import (\n    ReRankEndEvent,\n    ReRankStartEvent,\n)\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.schema import MetadataMode, NodeWithScore, QueryBundle\n\ndispatcher = get_dispatcher(__name__)\n\n\nclass LocalRerank(BaseNodePostprocessor):\n    api_url: str = Field(\n        default=\"http://127.0.0.1:5001/api/v1/reranker\",\n        description=\"API url.\",\n    )\n    model: str = Field(\n        default=\"BAAI/bge-reranker-v2-m3\",\n        description=\"The model to use when calling API\",\n    )\n\n    top_n: int = Field(description=\"Top N nodes to return.\")\n\n    _session: Any = PrivateAttr()\n\n    def __init__(\n        self,\n        top_n: int = 2,\n        model: str = \"BAAI/bge-reranker-v2-m3\",\n        api_url: str = \"http://127.0.0.1:5001/api/v1/reranker\",\n    ):\n        super().__init__(top_n=top_n, model=model)\n        self.api_url = api_url\n        self.model = model\n        self._session = requests.Session()\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"LocalRerank\"\n\n    def _postprocess_nodes(\n        self,\n        nodes: List[NodeWithScore],\n        query_bundle: Optional[QueryBundle] = None,\n    ) -> List[NodeWithScore]:\n        dispatcher.event(\n            ReRankStartEvent(\n                query=query_bundle,\n                nodes=nodes,\n                top_n=self.top_n,\n                model_name=self.model,\n            )\n        )\n\n        if query_bundle is None:\n            raise ValueError(\"Missing query bundle in extra info.\")\n        if len(nodes) == 0:\n            return []\n\n        with 
self.callback_manager.event(\n            CBEventType.RERANKING,\n            payload={\n                EventPayload.NODES: nodes,\n                EventPayload.MODEL_NAME: self.model,\n                EventPayload.QUERY_STR: query_bundle.query_str,\n                EventPayload.TOP_K: self.top_n,\n            },\n        ) as event:\n            texts = [\n                node.node.get_content(metadata_mode=MetadataMode.EMBED)\n                for node in nodes\n            ]\n            resp = self._session.post(  # type: ignore\n                self.api_url,\n                json={\n                    \"query\": query_bundle.query_str,\n                    \"model\": self.model,\n                    \"passages\": texts,\n                },\n            )\n            resp.raise_for_status()\n            resp_json = resp.json()\n            if \"scores\" not in resp_json:\n                raise RuntimeError(f\"Got error from reranker: {resp_json}\")\n\n            results = zip(range(len(nodes)), resp_json[\"scores\"])\n            results = sorted(results, key=lambda x: x[1], reverse=True)[: self.top_n]\n\n            new_nodes = []\n            for result in results:\n                new_node_with_score = NodeWithScore(\n                    node=nodes[result[0]].node, score=result[1]\n                )\n                new_nodes.append(new_node_with_score)\n            event.on_end(payload={EventPayload.NODES: new_nodes})\n\n        dispatcher.event(ReRankEndEvent(nodes=new_nodes))\n        return new_nodes\n"
  },
  {
    "path": "backend/app/rag/rerankers/provider.py",
    "content": "import enum\nfrom typing import List\nfrom pydantic import BaseModel\n\n\nclass RerankerProvider(str, enum.Enum):\n    JINA = \"jina\"\n    COHERE = \"cohere\"\n    BAISHENG = \"baisheng\"\n    LOCAL = \"local\"\n    VLLM = \"vllm\"\n    XINFERENCE = \"xinference\"\n    BEDROCK = \"bedrock\"\n\n\nclass RerankerProviderOption(BaseModel):\n    provider: RerankerProvider\n    provider_display_name: str | None = None\n    provider_description: str | None = None\n    provider_url: str | None = None\n    default_reranker_model: str\n    reranker_model_description: str\n    default_top_n: int = 10\n    default_credentials: str | dict = \"\"\n    default_config: dict = {}\n    config_description: str = \"\"\n    credentials_display_name: str\n    credentials_description: str\n    credentials_type: str = \"str\"\n\n\nreranker_provider_options: List[RerankerProviderOption] = [\n    RerankerProviderOption(\n        provider=RerankerProvider.JINA,\n        provider_display_name=\"Jina AI\",\n        provider_description=\"We provide best-in-class embeddings, rerankers, LLM-reader and prompt optimizers, pioneering search AI for multimodal data.\",\n        provider_url=\"https://jina.ai\",\n        default_reranker_model=\"jina-reranker-v2-base-multilingual\",\n        reranker_model_description=\"Reference: https://jina.ai/reranker/\",\n        default_top_n=10,\n        credentials_display_name=\"Jina API Key\",\n        credentials_description=\"You can get one from https://jina.ai/reranker/\",\n        credentials_type=\"str\",\n        default_credentials=\"jina_****\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.COHERE,\n        provider_display_name=\"Cohere\",\n        provider_description=\"Cohere provides industry-leading large language models (LLMs) and RAG capabilities tailored to meet the needs of enterprise use cases that solve real-world problems.\",\n        provider_url=\"https://cohere.com/\",\n        
default_reranker_model=\"rerank-multilingual-v3.0\",\n        reranker_model_description=\"Reference: https://docs.cohere.com/reference/rerank\",\n        default_top_n=10,\n        credentials_display_name=\"Cohere API Key\",\n        credentials_description=\"You can get one from https://dashboard.cohere.com/api-keys\",\n        credentials_type=\"str\",\n        default_credentials=\"*****\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.BAISHENG,\n        provider_display_name=\"BaiSheng\",\n        default_reranker_model=\"bge-reranker-v2-m3\",\n        reranker_model_description=\"\",\n        default_top_n=10,\n        default_config={\n            \"api_url\": \"http://api.chat.prd.yumc.local/chat/v1/reranker\",\n        },\n        credentials_display_name=\"BaiSheng API Key\",\n        credentials_description=\"\",\n        credentials_type=\"str\",\n        default_credentials=\"*****\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.LOCAL,\n        provider_display_name=\"Local Reranker\",\n        provider_description=\"TIDB.AI's local reranker server, deployed on your own infrastructure and powered by sentence-transformers.\",\n        default_reranker_model=\"BAAI/bge-reranker-v2-m3\",\n        reranker_model_description=\"Find more models in huggingface.\",\n        default_top_n=10,\n        default_config={\n            \"api_url\": \"http://local-embedding-reranker:5001/api/v1/reranker\",\n        },\n        config_description=\"api_url is the url of the tidb ai local reranker server.\",\n        credentials_display_name=\"Local Reranker API Key\",\n        credentials_description=\"Local Reranker server doesn't require an API key, set a dummy string here is ok.\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.VLLM,\n        provider_display_name=\"vLLM\",\n        provider_description=\"vLLM is 
a fast and easy-to-use library for LLM inference and serving.\",\n        default_reranker_model=\"BAAI/bge-reranker-v2-m3\",\n        reranker_model_description=\"Reference: https://docs.vllm.ai/en/latest/models/supported_models.html#sentence-pair-scoring-task-score\",\n        default_top_n=10,\n        default_config={\n            \"base_url\": \"http://localhost:8000\",\n        },\n        config_description=\"base_url is the base url of the vLLM server, ensure it can be accessed from this server\",\n        credentials_display_name=\"vLLM API Key\",\n        credentials_description=\"vLLM doesn't require an API key, set a dummy string here is ok\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.XINFERENCE,\n        provider_display_name=\"Xinference Reranker\",\n        provider_description=\"Xorbits Inference (Xinference) is an open-source platform to streamline the operation and integration of a wide array of AI models.\",\n        default_reranker_model=\"bge-reranker-v2-m3\",\n        reranker_model_description=\"Reference: https://inference.readthedocs.io/en/latest/models/model_abilities/rerank.html\",\n        default_top_n=10,\n        default_config={\n            \"base_url\": \"http://localhost:9997\",\n        },\n        config_description=\"base_url is the url of the Xinference server, ensure it can be accessed from this server\",\n        credentials_display_name=\"Xinference API Key\",\n        credentials_description=\"Xinference doesn't require an API key, set a dummy string here is ok\",\n        credentials_type=\"str\",\n        default_credentials=\"dummy\",\n    ),\n    RerankerProviderOption(\n        provider=RerankerProvider.BEDROCK,\n        provider_display_name=\"Bedrock Reranker\",\n        provider_description=\"Amazon Bedrock is a fully managed foundation models service.\",\n        
provider_url=\"https://docs.aws.amazon.com/bedrock/\",\n        default_reranker_model=\"amazon.rerank-v1:0\",\n        reranker_model_description=\"Find more models in https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models-reference.html.\",\n        default_top_n=10,\n        credentials_display_name=\"AWS Bedrock Credentials JSON\",\n        credentials_description=\"The JSON Object of AWS Credentials, refer to https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global\",\n        credentials_type=\"dict\",\n        default_credentials={\n            \"aws_access_key_id\": \"****\",\n            \"aws_secret_access_key\": \"****\",\n            \"aws_region_name\": \"us-west-2\",\n        },\n    ),\n]\n"
  },
  {
    "path": "backend/app/rag/rerankers/resolver.py",
    "content": "from typing import Optional\nfrom sqlmodel import Session\n\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.postprocessor.jinaai_rerank import JinaRerank\nfrom llama_index.postprocessor.cohere_rerank import CohereRerank\nfrom llama_index.postprocessor.xinference_rerank import XinferenceRerank\nfrom llama_index.postprocessor.bedrock_rerank import AWSBedrockRerank\n\nfrom app.rag.rerankers.baisheng.baisheng_reranker import BaishengRerank\nfrom app.rag.rerankers.local.local_reranker import LocalRerank\nfrom app.rag.rerankers.vllm.vllm_reranker import VLLMRerank\nfrom app.rag.rerankers.provider import RerankerProvider\n\nfrom app.repositories.reranker_model import reranker_model_repo\n\n\ndef resolve_reranker_by_id(\n    session: Session, reranker_model_id: int, top_n: int\n) -> BaseNodePostprocessor:\n    db_reranker_model = reranker_model_repo.must_get(session, reranker_model_id)\n    return resolve_reranker(\n        db_reranker_model.provider,\n        db_reranker_model.model,\n        top_n or db_reranker_model.top_n,\n        db_reranker_model.config,\n        db_reranker_model.credentials,\n    )\n\n\ndef resolve_reranker(\n    provider: RerankerProvider,\n    model: str,\n    top_n: int,\n    config: dict,\n    credentials: str | list | dict | None,\n) -> BaseNodePostprocessor:\n    match provider:\n        case RerankerProvider.JINA:\n            return JinaRerank(\n                model=model,\n                top_n=top_n,\n                api_key=credentials,\n                **config,\n            )\n        case RerankerProvider.COHERE:\n            return CohereRerank(\n                model=model,\n                top_n=top_n,\n                api_key=credentials,\n                **config,\n            )\n        case RerankerProvider.BAISHENG:\n            return BaishengRerank(\n                model=model,\n                top_n=top_n,\n                api_key=credentials,\n                
**config,\n            )\n        case RerankerProvider.LOCAL:\n            return LocalRerank(\n                model=model,\n                top_n=top_n,\n                **config,\n            )\n        case RerankerProvider.VLLM:\n            return VLLMRerank(\n                model=model,\n                top_n=top_n,\n                **config,\n            )\n        case RerankerProvider.XINFERENCE:\n            return XinferenceRerank(\n                model=model,\n                top_n=top_n,\n                **config,\n            )\n        case RerankerProvider.BEDROCK:\n            return AWSBedrockRerank(\n                rerank_model_name=model,\n                top_n=top_n,\n                aws_access_key_id=credentials[\"aws_access_key_id\"],\n                aws_secret_access_key=credentials[\"aws_secret_access_key\"],\n                region_name=credentials[\"aws_region_name\"],\n                **config,\n            )\n        case _:\n            raise ValueError(f\"Got unknown reranker provider: {provider}\")\n\n\n# FIXME: Reranker top_n should be config in the retrieval config.\ndef get_default_reranker_model(\n    session: Session, top_n: int = None\n) -> Optional[BaseNodePostprocessor]:\n    db_reranker = reranker_model_repo.get_default(session)\n    if not db_reranker:\n        return None\n    top_n = db_reranker.top_n if top_n is None else top_n\n    return resolve_reranker(\n        db_reranker.provider,\n        db_reranker.model,\n        top_n,\n        db_reranker.config,\n        db_reranker.credentials,\n    )\n\n\ndef must_get_default_reranker_model(session: Session) -> BaseNodePostprocessor:\n    db_reranker = reranker_model_repo.must_get_default(session)\n    return resolve_reranker(\n        db_reranker.provider,\n        db_reranker.model,\n        db_reranker.top_n,\n        db_reranker.config,\n        db_reranker.credentials,\n    )\n"
  },
  {
    "path": "backend/app/rag/rerankers/vllm/vllm_reranker.py",
    "content": "from typing import Any, List, Optional\nimport requests\n\nfrom llama_index.core.bridge.pydantic import Field, PrivateAttr\nfrom llama_index.core.callbacks import CBEventType, EventPayload\nfrom llama_index.core.instrumentation import get_dispatcher\nfrom llama_index.core.instrumentation.events.rerank import (\n    ReRankEndEvent,\n    ReRankStartEvent,\n)\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.schema import MetadataMode, NodeWithScore, QueryBundle\n\ndispatcher = get_dispatcher(__name__)\n\n\nclass VLLMRerank(BaseNodePostprocessor):\n    base_url: str = Field(default=\"\", description=\"The base URL of vLLM API.\")\n    model: str = Field(default=\"\", description=\"The model to use when calling API.\")\n\n    top_n: int = Field(description=\"Top N nodes to return.\")\n\n    _session: Any = PrivateAttr()\n\n    def __init__(\n        self,\n        top_n: int = 2,\n        model: str = \"BAAI/bge-reranker-v2-m3\",\n        base_url: str = \"http://localhost:8000\",\n    ):\n        super().__init__(top_n=top_n, model=model)\n        self.base_url = base_url\n        self.model = model\n        self._session = requests.Session()\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"VLLMRerank\"\n\n    def _postprocess_nodes(\n        self,\n        nodes: List[NodeWithScore],\n        query_bundle: Optional[QueryBundle] = None,\n    ) -> List[NodeWithScore]:\n        dispatcher.event(\n            ReRankStartEvent(\n                query=query_bundle,\n                nodes=nodes,\n                top_n=self.top_n,\n                model_name=self.model,\n            )\n        )\n\n        if query_bundle is None:\n            raise ValueError(\"Missing query bundle in extra info.\")\n        if len(nodes) == 0:\n            return []\n\n        with self.callback_manager.event(\n            CBEventType.RERANKING,\n            payload={\n                EventPayload.NODES: 
nodes,\n                EventPayload.MODEL_NAME: self.model,\n                EventPayload.QUERY_STR: query_bundle.query_str,\n                EventPayload.TOP_K: self.top_n,\n            },\n        ) as event:\n            texts = [\n                node.node.get_content(metadata_mode=MetadataMode.EMBED)\n                for node in nodes\n            ]\n            resp = self._session.post(  # type: ignore\n                url=f\"{self.base_url}/v1/score\",\n                json={\n                    \"text_1\": query_bundle.query_str,\n                    \"model\": self.model,\n                    \"text_2\": texts,\n                },\n            )\n            resp.raise_for_status()\n            resp_json = resp.json()\n            if \"data\" not in resp_json:\n                raise RuntimeError(f\"Got error from reranker: {resp_json}\")\n\n            results = zip(range(len(nodes)), resp_json[\"data\"])\n            results = sorted(results, key=lambda x: x[1][\"score\"], reverse=True)[\n                : self.top_n\n            ]\n\n            new_nodes = []\n            for result in results:\n                new_node_with_score = NodeWithScore(\n                    node=nodes[result[0]].node, score=result[1][\"score\"]\n                )\n                new_nodes.append(new_node_with_score)\n            event.on_end(payload={EventPayload.NODES: new_nodes})\n\n        dispatcher.event(ReRankEndEvent(nodes=new_nodes))\n        return new_nodes\n"
  },
  {
    "path": "backend/app/rag/retrievers/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/retrievers/chunk/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/retrievers/chunk/fusion_retriever.py",
    "content": "from typing import List, Optional, Dict, Tuple\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.llms import LLM\nfrom llama_index.core.schema import NodeWithScore\nfrom sqlmodel import Session\nfrom app.rag.retrievers.chunk.simple_retriever import (\n    ChunkSimpleRetriever,\n)\nfrom app.rag.retrievers.chunk.schema import (\n    RetrievedChunkDocument,\n    VectorSearchRetrieverConfig,\n    ChunksRetrievalResult,\n    ChunkRetriever,\n)\nfrom app.rag.retrievers.chunk.helpers import map_nodes_to_chunks\nfrom app.rag.retrievers.multiple_knowledge_base import MultiKBFusionRetriever\nfrom app.repositories import knowledge_base_repo, document_repo\n\n\nclass ChunkFusionRetriever(MultiKBFusionRetriever, ChunkRetriever):\n    def __init__(\n        self,\n        db_session: Session,\n        knowledge_base_ids: List[int],\n        llm: LLM,\n        use_query_decompose: bool = False,\n        config: VectorSearchRetrieverConfig = VectorSearchRetrieverConfig(),\n        callback_manager: Optional[CallbackManager] = CallbackManager([]),\n        **kwargs,\n    ):\n        # Prepare vector search retrievers for knowledge bases.\n        retrievers = []\n        knowledge_bases = knowledge_base_repo.get_by_ids(db_session, knowledge_base_ids)\n        for kb in knowledge_bases:\n            retrievers.append(\n                ChunkSimpleRetriever(\n                    knowledge_base_id=kb.id,\n                    config=config,\n                    callback_manager=callback_manager,\n                    db_session=db_session,\n                )\n            )\n\n        super().__init__(\n            db_session=db_session,\n            retrievers=retrievers,\n            llm=llm,\n            use_query_decompose=use_query_decompose,\n            callback_manager=callback_manager,\n            **kwargs,\n        )\n\n    def _fusion(\n        self, query: str, results: Dict[Tuple[str, 
int], List[NodeWithScore]]\n    ) -> List[NodeWithScore]:\n        return self._simple_fusion(query, results)\n\n    def _simple_fusion(\n        self, query: str, results: Dict[Tuple[str, int], List[NodeWithScore]]\n    ):\n        \"\"\"Apply simple fusion.\"\"\"\n        # Use a dict to de-duplicate nodes\n        all_nodes: Dict[str, NodeWithScore] = {}\n        for nodes_with_scores in results.values():\n            for node_with_score in nodes_with_scores:\n                hash = node_with_score.node.hash\n                if hash in all_nodes:\n                    max_score = max(\n                        node_with_score.score or 0.0, all_nodes[hash].score or 0.0\n                    )\n                    all_nodes[hash].score = max_score\n                else:\n                    all_nodes[hash] = node_with_score\n\n        return sorted(all_nodes.values(), key=lambda x: x.score or 0.0, reverse=True)\n\n    def retrieve_chunks(\n        self,\n        query_str: str,\n        full_document: bool = False,\n    ) -> ChunksRetrievalResult:\n        nodes_with_score = self._retrieve(QueryBundle(query_str))\n        chunks = map_nodes_to_chunks(nodes_with_score)\n\n        document_ids = [c.document_id for c in chunks]\n        documents = document_repo.fetch_by_ids(self._db_session, document_ids)\n        if full_document:\n            return ChunksRetrievalResult(chunks=chunks, documents=documents)\n        else:\n            return ChunksRetrievalResult(\n                chunks=chunks,\n                documents=[\n                    RetrievedChunkDocument(\n                        id=d.id, name=d.name, source_uri=d.source_uri\n                    )\n                    for d in documents\n                ],\n            )\n"
  },
  {
    "path": "backend/app/rag/retrievers/chunk/helpers.py",
    "content": "from typing import List\n\nfrom llama_index.core.schema import NodeWithScore\n\nfrom app.rag.retrievers.chunk.schema import RetrievedChunk\n\n\ndef map_nodes_to_chunks(nodes_with_score: List[NodeWithScore]) -> List[RetrievedChunk]:\n    return [\n        RetrievedChunk(\n            id=ns.node.node_id,\n            text=ns.node.text,\n            metadata=ns.node.metadata,\n            document_id=ns.node.metadata[\"document_id\"],\n            score=ns.score,\n        )\n        for ns in nodes_with_score\n    ]\n"
  },
  {
    "path": "backend/app/rag/retrievers/chunk/schema.py",
    "content": "from abc import ABC\nfrom typing import Any, Dict, Optional\n\nfrom pydantic import BaseModel\n\nfrom app.models import Document\n\n\nclass RerankerConfig(BaseModel):\n    enabled: bool = True\n    model_id: int = None\n    top_n: int = 10\n\n\nclass MetadataFilterConfig(BaseModel):\n    enabled: bool = True\n    filters: Dict[str, Any] = None\n\n\nclass VectorSearchRetrieverConfig(BaseModel):\n    top_k: int = 10\n    similarity_top_k: Optional[int] = None\n    oversampling_factor: Optional[int] = 5\n    reranker: Optional[RerankerConfig] = None\n    metadata_filter: Optional[MetadataFilterConfig] = None\n\n\nclass KBChunkRetrievalConfig(BaseModel):\n    knowledge_base_ids: list[int] = None\n\n\nclass ChunkRetrievalConfig(BaseModel):\n    knowledge_base_ids: list[int] = None\n\n\n# Retrieved Chunks\n\n\nclass RetrievedChunkDocument(BaseModel):\n    id: int\n    name: str\n    source_uri: str\n\n\nclass RetrievedChunk(BaseModel):\n    id: str\n    text: str\n    metadata: dict\n    document_id: Optional[int]\n    score: float\n\n\nclass ChunksRetrievalResult(BaseModel):\n    chunks: list[RetrievedChunk]\n    documents: Optional[list[Document | RetrievedChunkDocument]] = None\n\n\nclass ChunkRetriever(ABC):\n    def retrieve_chunks(\n        self,\n        query_str: str,\n        full_document: bool = False,\n    ) -> ChunksRetrievalResult:\n        \"\"\"Retrieve chunks\"\"\"\n"
  },
  {
    "path": "backend/app/rag/retrievers/chunk/simple_retriever.py",
    "content": "import logging\n\nfrom typing import List, Optional, Type\n\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.indices.utils import log_vector_store_query_result\nfrom llama_index.core.vector_stores import VectorStoreQuery, VectorStoreQueryResult\nfrom sqlmodel import Session\nfrom llama_index.core.retrievers import BaseRetriever\nfrom llama_index.core.schema import NodeWithScore, QueryBundle\nimport llama_index.core.instrumentation as instrument\nfrom sqlmodel import SQLModel\n\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.rag.knowledge_base.config import get_kb_embed_model\nfrom app.rag.rerankers.resolver import resolve_reranker_by_id\nfrom app.rag.retrievers.chunk.schema import (\n    RetrievedChunkDocument,\n    VectorSearchRetrieverConfig,\n    ChunksRetrievalResult,\n    ChunkRetriever,\n)\nfrom app.rag.retrievers.chunk.helpers import map_nodes_to_chunks\nfrom app.rag.indices.vector_search.vector_store.tidb_vector_store import TiDBVectorStore\nfrom app.rag.postprocessors.metadata_post_filter import MetadataPostFilter\nfrom app.repositories import knowledge_base_repo, document_repo\n\nlogger = logging.getLogger(__name__)\n\n\ndispatcher = instrument.get_dispatcher(__name__)\n\n\nclass ChunkSimpleRetriever(BaseRetriever, ChunkRetriever):\n    _chunk_model: Type[SQLModel]\n\n    def __init__(\n        self,\n        knowledge_base_id: int,\n        config: VectorSearchRetrieverConfig,\n        db_session: Optional[Session] = None,\n        callback_manager: CallbackManager = CallbackManager([]),\n    ):\n        super().__init__()\n        if not knowledge_base_id:\n            raise ValueError(\"Knowledge base id is required\")\n\n        self._config = config\n        self._db_session = db_session\n        self._kb = knowledge_base_repo.must_get(db_session, knowledge_base_id)\n        self._chunk_db_model = get_kb_chunk_model(self._kb)\n        self._embed_model = get_kb_embed_model(db_session, 
self._kb)\n        self._embed_model.callback_manager = callback_manager\n\n        # Init vector store.\n        self._vector_store = TiDBVectorStore(\n            session=db_session,\n            chunk_db_model=self._chunk_db_model,\n            oversampling_factor=config.oversampling_factor,\n            callback_manager=callback_manager,\n        )\n\n        # Init node postprocessors.\n        node_postprocessors = []\n\n        # Metadata filter\n        filter_config = config.metadata_filter\n        if filter_config and filter_config.enabled:\n            metadata_filter = MetadataPostFilter(filter_config.filters)\n            node_postprocessors.append(metadata_filter)\n\n        # Reranker\n        reranker_config = config.reranker\n        if reranker_config and reranker_config.enabled:\n            reranker = resolve_reranker_by_id(\n                db_session, reranker_config.model_id, reranker_config.top_n\n            )\n            node_postprocessors.append(reranker)\n\n        self._node_postprocessors = node_postprocessors\n\n    @dispatcher.span\n    def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:\n        if query_bundle.embedding is None and len(query_bundle.embedding_strs) > 0:\n            query_bundle.embedding = self._embed_model.get_agg_embedding_from_queries(\n                query_bundle.embedding_strs\n            )\n\n        result = self._vector_store.query(\n            VectorStoreQuery(\n                query_str=query_bundle.query_str,\n                query_embedding=query_bundle.embedding,\n                similarity_top_k=self._config.similarity_top_k or self._config.top_k,\n            )\n        )\n        nodes = self._build_node_list_from_query_result(result)\n\n        for node_postprocessor in self._node_postprocessors:\n            nodes = node_postprocessor.postprocess_nodes(\n                nodes, query_bundle=query_bundle\n            )\n\n        return nodes[: self._config.top_k]\n\n    def 
_build_node_list_from_query_result(\n        self, query_result: VectorStoreQueryResult\n    ) -> List[NodeWithScore]:\n        log_vector_store_query_result(query_result)\n        node_with_scores: List[NodeWithScore] = []\n        for ind, node in enumerate(query_result.nodes):\n            score: Optional[float] = None\n            if query_result.similarities is not None:\n                score = query_result.similarities[ind]\n            node_with_scores.append(NodeWithScore(node=node, score=score))\n\n        return node_with_scores\n\n    def retrieve_chunks(\n        self, query_str: str, full_document: bool = False\n    ) -> ChunksRetrievalResult:\n        nodes_with_score = self.retrieve(query_str)\n        chunks = map_nodes_to_chunks(nodes_with_score)\n        document_ids = [c.document_id for c in chunks]\n        documents = document_repo.fetch_by_ids(self._db_session, document_ids)\n\n        if full_document:\n            return ChunksRetrievalResult(chunks=chunks, documents=documents)\n        else:\n            return ChunksRetrievalResult(\n                chunks=chunks,\n                documents=[\n                    RetrievedChunkDocument(\n                        id=d.id, name=d.name, source_uri=d.source_uri\n                    )\n                    for d in documents\n                ],\n            )\n"
  },
  {
    "path": "backend/app/rag/retrievers/knowledge_graph/__init__.py",
    "content": ""
  },
  {
    "path": "backend/app/rag/retrievers/knowledge_graph/fusion_retriever.py",
    "content": "import logging\n\nfrom sqlmodel import Session\nfrom typing import List, Optional, Dict, Tuple\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.schema import NodeWithScore\nfrom llama_index.core.llms import LLM\n\nfrom app.models import KnowledgeBase\nfrom app.rag.retrievers.multiple_knowledge_base import MultiKBFusionRetriever\nfrom app.rag.retrievers.knowledge_graph.simple_retriever import (\n    KnowledgeGraphSimpleRetriever,\n)\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrieverConfig,\n    KnowledgeGraphRetrievalResult,\n    KnowledgeGraphNode,\n    KnowledgeGraphRetriever,\n)\nfrom app.repositories import knowledge_base_repo\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass KnowledgeGraphFusionRetriever(MultiKBFusionRetriever, KnowledgeGraphRetriever):\n    knowledge_base_map: Dict[int, KnowledgeBase] = {}\n\n    def __init__(\n        self,\n        db_session: Session,\n        knowledge_base_ids: List[int],\n        llm: LLM,\n        use_query_decompose: bool = False,\n        config: KnowledgeGraphRetrieverConfig = KnowledgeGraphRetrieverConfig(),\n        callback_manager: Optional[CallbackManager] = CallbackManager([]),\n        **kwargs,\n    ):\n        self.use_query_decompose = use_query_decompose\n\n        # Prepare knowledge graph retrievers for knowledge bases.\n        retrievers = []\n        knowledge_bases = knowledge_base_repo.get_by_ids(db_session, knowledge_base_ids)\n        self.knowledge_bases = knowledge_bases\n        for kb in knowledge_bases:\n            self.knowledge_base_map[kb.id] = kb\n            retrievers.append(\n                KnowledgeGraphSimpleRetriever(\n                    db_session=db_session,\n                    knowledge_base_id=kb.id,\n                    config=config,\n                    callback_manager=callback_manager,\n                )\n            )\n\n        
super().__init__(\n            db_session=db_session,\n            retrievers=retrievers,\n            llm=llm,\n            use_query_decompose=use_query_decompose,\n            callback_manager=callback_manager,\n            **kwargs,\n        )\n\n    def retrieve_knowledge_graph(\n        self, query_text: str\n    ) -> KnowledgeGraphRetrievalResult:\n        nodes_with_score = self._retrieve(QueryBundle(query_text))\n        if len(nodes_with_score) == 0:\n            return KnowledgeGraphRetrievalResult()\n        node: KnowledgeGraphNode = nodes_with_score[0].node  # type:ignore\n\n        return KnowledgeGraphRetrievalResult(\n            query=node.query,\n            knowledge_bases=[kb.to_descriptor() for kb in self.knowledge_bases],\n            entities=node.entities,\n            relationships=node.relationships,\n            subgraphs=[\n                KnowledgeGraphRetrievalResult(\n                    query=child_node.query,\n                    knowledge_base=self.knowledge_base_map[\n                        child_node.knowledge_base_id\n                    ].to_descriptor(),\n                    entities=child_node.entities,\n                    relationships=child_node.relationships,\n                )\n                for child_node in node.children\n            ],\n        )\n\n    def _fusion(\n        self, query: str, results: Dict[Tuple[str, int], List[NodeWithScore]]\n    ) -> List[NodeWithScore]:\n        return self._knowledge_graph_fusion(query, results)\n\n    def _knowledge_graph_fusion(\n        self, query: str, results: Dict[Tuple[str, int], List[NodeWithScore]]\n    ) -> List[NodeWithScore]:\n        merged_entities = set()\n        merged_relationships = {}\n        merged_knowledge_base_ids = set()\n        merged_children_nodes = []\n\n        for nodes_with_scores in results.values():\n            if len(nodes_with_scores) == 0:\n                continue\n            node: KnowledgeGraphNode = nodes_with_scores[0].node  # 
type:ignore\n\n            # Merge knowledge base id.\n            merged_knowledge_base_ids.add(node.knowledge_base_id)\n\n            # Merge entities.\n            merged_entities.update(node.entities)\n\n            # Merge relationships.\n            for r in node.relationships:\n                key = r.rag_description\n                if key not in merged_relationships:\n                    merged_relationships[key] = r\n                else:\n                    merged_relationships[key].weight += r.weight\n            # Merge to children nodes.\n            merged_children_nodes.append(node)\n\n        return [\n            NodeWithScore(\n                node=KnowledgeGraphNode(\n                    query=query,\n                    entities=list(merged_entities),\n                    relationships=list(merged_relationships.values()),\n                    knowledge_base_ids=merged_knowledge_base_ids,\n                    children=merged_children_nodes,\n                ),\n                score=1,\n            )\n        ]\n"
  },
  {
    "path": "backend/app/rag/retrievers/knowledge_graph/schema.py",
    "content": "import datetime\nimport json\nfrom abc import ABC\nfrom enum import Enum\n\nfrom hashlib import sha256\nfrom typing import Optional, Mapping, Any, List\nfrom llama_index.core.schema import BaseNode, MetadataMode\nfrom pydantic import BaseModel, Field\n\nfrom app.models.entity import EntityType\nfrom app.api.admin_routes.models import KnowledgeBaseDescriptor\n\n# Retriever Config\n\n\nclass MetadataFilterConfig(BaseModel):\n    enabled: bool = True\n    filters: dict[str, Any] = None\n\n\nclass KnowledgeGraphRetrieverConfig(BaseModel):\n    depth: int = 2\n    include_meta: bool = False\n    with_degree: bool = False\n    metadata_filter: Optional[MetadataFilterConfig] = None\n\n\n# Stored Knowledge Graph\n\n\nclass StoredKnowledgeGraphVersion(int, Enum):\n    V1 = 1\n\n\nclass StoredSubGraph(BaseModel):\n    query: Optional[str] = None\n    knowledge_base_id: Optional[int] = None\n    entities: Optional[list[int]] = None\n    relationships: Optional[list[int]] = None\n\n\nclass StoredKnowledgeGraph(StoredSubGraph):\n    \"\"\"\n    StoredKnowledgeGraph represents the structure of the knowledge graph stored in the database.\n    \"\"\"\n\n    # If not provided, it means that the old version of the storage format is used, which only\n    # stores entities and relationships information.\n    version: Optional[int] = StoredKnowledgeGraphVersion.V1\n    knowledge_base_ids: Optional[list[int]] = []\n    subgraphs: Optional[list[\"StoredSubGraph\"]] = None\n\n\n# Retrieved Knowledge Graph\n\n\nclass RetrievedEntity(BaseModel):\n    id: int = Field(description=\"ID of the entity\")\n    knowledge_base_id: Optional[int] = Field(\n        description=\"ID of the knowledge base\", default=None\n    )\n    entity_type: Optional[EntityType] = Field(\n        description=\"Type of the entity\", default=EntityType.original\n    )\n    name: str = Field(description=\"Name of the entity\")\n    description: str = Field(description=\"Description of the entity\", 
default=\"\")\n    meta: Optional[Mapping[str, Any]] = Field(\n        description=\"Metadata of the entity\", default={}\n    )\n\n    @property\n    def global_id(self) -> str:\n        return f\"{self.knowledge_base_id or 0}-{self.id}\"\n\n    def __hash__(self):\n        return hash(self.global_id)\n\n\nclass RetrievedRelationship(BaseModel):\n    id: int = Field(description=\"ID of the relationship\")\n    knowledge_base_id: int = Field(description=\"ID of the knowledge base\", default=None)\n    source_entity_id: int = Field(description=\"ID of the source entity\")\n    target_entity_id: int = Field(description=\"ID of the target entity\")\n    description: str = Field(description=\"Description of the relationship\")\n    meta: Optional[Mapping[str, Any]] = Field(\n        description=\"Metadata of the relationship\"\n    )\n    rag_description: Optional[str] = Field(\n        description=\"RAG description of the relationship\"\n    )\n    weight: Optional[float] = Field(description=\"Weight of the relationship\")\n    last_modified_at: Optional[datetime.datetime] = Field(\n        description=\"Last modified at of the relationship\", default=None\n    )\n\n    @property\n    def global_id(self) -> str:\n        return f\"{self.knowledge_base_id or 0}-{self.id}\"\n\n    def __hash__(self):\n        return hash(self.global_id)\n\n\nclass RetrievedSubGraph(BaseModel):\n    query: Optional[str | list[str]] = Field(\n        description=\"List of queries that are used to retrieve the knowledge graph\",\n        default=None,\n    )\n    knowledge_base: Optional[KnowledgeBaseDescriptor] = Field(\n        description=\"The knowledge base that the knowledge graph is retrieved from\",\n        default=None,\n    )\n    entities: List[RetrievedEntity] = Field(\n        description=\"List of entities in the knowledge graph\", default_factory=list\n    )\n    relationships: List[RetrievedRelationship] = Field(\n        description=\"List of relationships in the 
knowledge graph\", default_factory=list\n    )\n\n\nclass RetrievedKnowledgeGraph(RetrievedSubGraph):\n    \"\"\"\n    RetrievedKnowledgeGraph represents the structure of the knowledge graph retrieved\n    from the knowledge base.\n    \"\"\"\n\n    knowledge_bases: Optional[List[KnowledgeBaseDescriptor]] = Field(\n        description=\"List of knowledge bases that the knowledge graph is retrieved from\",\n        default_factory=list,\n    )\n\n    subgraphs: Optional[List[\"RetrievedSubGraph\"]] = Field(\n        description=\"List of subgraphs of the knowledge graph\", default_factory=list\n    )\n\n    def to_subqueries_dict(self) -> dict:\n        \"\"\"\n        For forward compatibility, we need to convert the subgraphs to a dictionary\n        of subqueries and then pass it to the prompt template.\n        \"\"\"\n        subqueries = {}\n        for subgraph in self.subgraphs:\n            if subgraph.query not in subqueries:\n                subqueries[subgraph.query] = {\n                    \"entities\": [e.model_dump() for e in subgraph.entities],\n                    \"relationships\": [r.model_dump() for r in subgraph.relationships],\n                }\n            else:\n                subqueries[subgraph.query][\"entities\"].extend(\n                    [e.model_dump() for e in subgraph.entities]\n                )\n                subqueries[subgraph.query][\"relationships\"].extend(\n                    [r.model_dump() for r in subgraph.relationships]\n                )\n\n        return subqueries\n\n    def to_stored_graph_dict(self) -> dict:\n        subgraph = self.to_stored_graph()\n        return subgraph.model_dump()\n\n    def to_stored_graph(self) -> StoredKnowledgeGraph:\n        return StoredKnowledgeGraph(\n            query=self.query,\n            knowledge_base_id=self.knowledge_base.id if self.knowledge_base else None,\n            knowledge_base_ids=[kb.id for kb in self.knowledge_bases]\n            if self.knowledge_bases\n    
        else None,\n            entities=[e.id for e in self.entities],\n            relationships=[r.id for r in self.relationships],\n            subgraphs=[s.to_stored_graph() for s in self.subgraphs],\n        )\n\n\nKnowledgeGraphRetrievalResult = RetrievedKnowledgeGraph\n\n\nclass KnowledgeGraphRetriever(ABC):\n    def retrieve_knowledge_graph(self, query_str: str) -> KnowledgeGraphRetrievalResult:\n        raise NotImplementedError\n\n\n# KnowledgeGraphNode\n\nDEFAULT_KNOWLEDGE_GRAPH_TMPL = \"\"\"\nQuery:\n------\n{query}\n\nEntities:\n------\n{entities_str}\n\nRelationships:\n------\n{relationships_str}\n\"\"\"\nDEFAULT_ENTITY_TMPL = \"\"\"\n- Name: {{ name }}\n  Description: {{ description }}\n\"\"\"\nDEFAULT_RELATIONSHIP_TMPL = \"\"\"\n- Description: {{ rag_description }}\n  Weight: {{ weight }}\n  Last Modified At: {{ last_modified_at }}\n  Meta: {{ meta }}\n\"\"\"\n\n\nclass KnowledgeGraphNode(BaseNode):\n    def __init__(self, *args: Any, **kwargs: Any) -> None:\n        super().__init__(*args, **kwargs)\n\n    query: Optional[str] = Field(description=\"Query of the knowledge graph\")\n\n    knowledge_base_id: Optional[int] = Field(\n        description=\"The id of the knowledge base that the knowledge graph belongs to\",\n        default=None,\n    )\n    knowledge_base_ids: Optional[List[int]] = Field(\n        description=\"List of ids of the knowledge base that the knowledge graph belongs to\",\n        default_factory=list,\n    )\n\n    entities: List[RetrievedEntity] = Field(\n        description=\"The list of entities in the knowledge graph\", default_factory=list\n    )\n    relationships: List[RetrievedRelationship] = Field(\n        description=\"The list of relationships in the knowledge graph\",\n        default_factory=list,\n    )\n    children: Optional[List[\"KnowledgeGraphNode\"]] = Field(\n        description=\"The children of the knowledge graph\",\n        default_factory=list,\n    )\n\n    # Template\n\n    
knowledge_base_template: str = Field(\n        default=DEFAULT_KNOWLEDGE_GRAPH_TMPL,\n        description=\"The template to render the knowledge graph as string\",\n    )\n    entity_template: str = Field(\n        default=DEFAULT_ENTITY_TMPL,\n        description=\"The template to render the entity list as string\",\n    )\n    relationship_template: str = Field(\n        default=DEFAULT_RELATIONSHIP_TMPL,\n        description=\"The template to render the relationship list as string\",\n    )\n\n    @classmethod\n    def get_type(cls) -> str:\n        return \"KnowledgeGraphNode\"\n\n    def get_content(self, metadata_mode: MetadataMode = MetadataMode.ALL) -> str:\n        return f\"\"\"\n        Query:\n        ------\n        {self.query}\n\n        Entities:\n        ------\n        {self._get_entities_str()}\n        \n        Relationships:\n        ------\n        {self._get_relationships_str()}\n        \"\"\"\n\n    def _get_entities_str(self) -> str:\n        strs = []\n        for entity in self.entities:\n            strs.append(\n                self.entity_template.format(\n                    name=entity.name, description=entity.description\n                )\n            )\n        return \"\\n\\n\".join(strs)\n\n    def _get_relationships_str(self) -> str:\n        strs = []\n        for relationship in self.relationships:\n            strs.append(\n                self.entity_template.format(\n                    rag_description=relationship.rag_description,\n                    weight=relationship.weight,\n                    last_modified_at=relationship.last_modified_at,\n                    meta=json.dumps(relationship.meta, indent=2, ensure_ascii=False),\n                )\n            )\n        return \"\\n\\n\".join(strs)\n\n    def _get_knowledge_graph_str(self) -> str:\n        return self.knowledge_base_template.format(\n            query=self.query,\n            entities_str=self._get_entities_str(),\n            
relationships_str=self._get_relationships_str(),\n        )\n\n    def set_content(self, kg: RetrievedKnowledgeGraph):\n        self.query = kg.query\n        self.knowledge_base_id = kg.knowledge_base.id if kg.knowledge_base else None\n        self.knowledge_base_ids = []\n        self.entities = kg.entities\n        self.relationships = kg.relationships\n        self.children = [\n            KnowledgeGraphNode(\n                query=subgraph.query,\n                knowledge_base_id=subgraph.knowledge_base.id\n                if subgraph.knowledge_base\n                else None,\n                entities=subgraph.entities,\n                relationships=subgraph.relationships,\n            )\n            for subgraph in kg.subgraphs\n        ]\n\n    @property\n    def hash(self) -> str:\n        kg_identity = self._get_knowledge_graph_str().encode(\"utf-8\")\n        return str(sha256(kg_identity).hexdigest())\n"
  },
  {
    "path": "backend/app/rag/retrievers/knowledge_graph/simple_retriever.py",
    "content": "from typing import Optional, List\n\nfrom sqlmodel import Session\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.retrievers import BaseRetriever\nfrom llama_index.core.schema import NodeWithScore\n\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.models.entity import get_kb_entity_model\nfrom app.models.relationship import get_kb_relationship_model\nfrom app.rag.retrievers.knowledge_graph.schema import (\n    KnowledgeGraphRetrieverConfig,\n    KnowledgeGraphRetrievalResult,\n    KnowledgeGraphNode,\n    KnowledgeGraphRetriever,\n)\nfrom app.rag.knowledge_base.config import get_kb_embed_model, get_kb_dspy_llm\nfrom app.rag.indices.knowledge_graph.graph_store import TiDBGraphStore\nfrom app.repositories import knowledge_base_repo\n\n\nclass KnowledgeGraphSimpleRetriever(BaseRetriever, KnowledgeGraphRetriever):\n    def __init__(\n        self,\n        db_session: Session,\n        knowledge_base_id: int,\n        config: KnowledgeGraphRetrieverConfig,\n        callback_manager: Optional[CallbackManager] = CallbackManager([]),\n        **kwargs,\n    ):\n        super().__init__(callback_manager, **kwargs)\n        self.config = config\n        self._callback_manager = callback_manager\n        self.knowledge_base = knowledge_base_repo.must_get(\n            db_session, knowledge_base_id\n        )\n        self.embed_model = get_kb_embed_model(db_session, self.knowledge_base)\n        self.embed_model.callback_manager = callback_manager\n        self.chunk_db_model = get_kb_chunk_model(self.knowledge_base)\n        self.entity_db_model = get_kb_entity_model(self.knowledge_base)\n        self.relationship_db_model = get_kb_relationship_model(self.knowledge_base)\n        # TODO: remove it\n        dspy_lm = get_kb_dspy_llm(db_session, self.knowledge_base)\n        self._kg_store = TiDBGraphStore(\n            knowledge_base=self.knowledge_base,\n            
dspy_lm=dspy_lm,\n            session=db_session,\n            embed_model=self.embed_model,\n            entity_db_model=self.entity_db_model,\n            relationship_db_model=self.relationship_db_model,\n            chunk_db_model=self.chunk_db_model,\n        )\n\n    def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:\n        metadata_filters = {}\n        if self.config.metadata_filter and self.config.metadata_filter.enabled:\n            metadata_filters = self.config.metadata_filter.filters\n\n        entities, relationships = self._kg_store.retrieve_with_weight(\n            query_bundle.query_str,\n            embedding=[],\n            depth=self.config.depth,\n            include_meta=self.config.include_meta,\n            with_degree=self.config.with_degree,\n            relationship_meta_filters=metadata_filters,\n        )\n        return [\n            NodeWithScore(\n                node=KnowledgeGraphNode(\n                    query=query_bundle.query_str,\n                    knowledge_base_id=self.knowledge_base.id,\n                    entities=entities,\n                    relationships=relationships,\n                ),\n                score=1,\n            )\n        ]\n\n    def retrieve_knowledge_graph(\n        self, query_text: str\n    ) -> KnowledgeGraphRetrievalResult:\n        nodes_with_score = self._retrieve(QueryBundle(query_text))\n        if len(nodes_with_score) == 0:\n            return KnowledgeGraphRetrievalResult()\n        node: KnowledgeGraphNode = nodes_with_score[0].node  # type:ignore\n        return KnowledgeGraphRetrievalResult(\n            query=node.query,\n            knowledge_base=self.knowledge_base.to_descriptor(),\n            entities=node.entities,\n            relationships=node.relationships,\n            subgraphs=[],\n        )\n"
  },
  {
    "path": "backend/app/rag/retrievers/multiple_knowledge_base.py",
    "content": "from abc import abstractmethod\n\nimport dspy\n\nfrom typing import List, Optional, Dict, Tuple\n\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.async_utils import run_async_tasks\nfrom llama_index.core.base.base_retriever import BaseRetriever\nfrom llama_index.core.callbacks import CallbackManager\nfrom llama_index.core.llms import LLM\nfrom llama_index.core.schema import NodeWithScore\nfrom pydantic import BaseModel\nfrom sqlmodel import Session\n\nfrom app.core.config import settings\nfrom app.rag.question_gen.query_decomposer import QueryDecomposer\nfrom app.rag.types import MyCBEventType\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\n\n\nclass FusionRetrievalBaseConfig(BaseModel):\n    llm_id: Optional[int] = None\n    knowledge_base_ids: List[int]\n    use_query_decompose: Optional[bool] = None\n\n\nclass MultiKBFusionRetriever(BaseRetriever):\n    def __init__(\n        self,\n        retrievers: List[BaseRetriever],\n        db_session: Session,\n        llm: LLM,\n        dspy_lm: Optional[dspy.LM] = None,\n        use_query_decompose: bool = True,\n        callback_manager: Optional[CallbackManager] = CallbackManager([]),\n        **kwargs,\n    ):\n        super().__init__(callback_manager, **kwargs)\n        self._use_query_decompose = use_query_decompose\n        self._db_session = db_session\n        self._callback_manager = callback_manager\n\n        # Setup query decomposer.\n        self._dspy_lm = dspy_lm or get_dspy_lm_by_llama_llm(llm)\n        self._query_decomposer = QueryDecomposer(\n            dspy_lm=self._dspy_lm,\n            complied_program_path=settings.COMPLIED_INTENT_ANALYSIS_PROGRAM_PATH,\n        )\n\n        # Setup multiple knowledge base selector.\n        self._retrievers = retrievers\n\n    def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:\n        if self._use_query_decompose:\n            queries = self._gen_sub_queries(query_bundle)\n        else:\n          
  queries = [query_bundle]\n\n        with self.callback_manager.event(\n            MyCBEventType.RUN_SUB_QUERIES, payload={\"queries\": queries}\n        ):\n            tasks, task_queries = [], []\n\n            for query in queries:\n                for i, retriever in enumerate(self._retrievers):\n                    tasks.append(retriever.aretrieve(query.query_str))\n                    task_queries.append((query.query_str, i))\n\n            task_results = run_async_tasks(tasks)\n            results = {}\n            for query_tuple, query_result in zip(task_queries, task_results):\n                results[query_tuple] = query_result\n\n        return self._fusion(query_bundle.query_str, results)\n\n    def _gen_sub_queries(self, query_bundle: QueryBundle) -> List[QueryBundle]:\n        queries = self._query_decomposer.decompose(query_bundle.query_str)\n        return [QueryBundle(r.question) for r in queries.questions]\n\n    @abstractmethod\n    def _fusion(\n        self, query: str, results: Dict[Tuple[str, int], List[NodeWithScore]]\n    ) -> List[NodeWithScore]:\n        \"\"\"fusion method\"\"\"\n"
  },
  {
    "path": "backend/app/rag/semantic_cache/__init__.py",
    "content": "from .base import SemanticCacheManager, SemanticItem\n\n__all__ = [\"SemanticCacheManager\", \"SemanticItem\"]\n"
  },
  {
    "path": "backend/app/rag/semantic_cache/base.py",
    "content": "import time\nimport dspy\nimport logging\nfrom typing import List, Literal, Optional\nfrom pydantic import BaseModel, Field\nfrom sqlmodel import Session, select, func\n\nfrom llama_index.core.embeddings.utils import EmbedType, resolve_embed_model\nfrom llama_index.embeddings.openai import OpenAIEmbedding, OpenAIEmbeddingModelType\n\nfrom app.models import SemanticCache\n\nlogger = logging.getLogger(__name__)\n\n\nclass SemanticItem(BaseModel):\n    \"\"\"A single question-answer pair for semantic search.\"\"\"\n\n    question: str = Field(description=\"The question in the question-answer pair.\")\n    answer: str = Field(description=\"The answer corresponding to the question.\")\n\n\nclass SemanticCandidate(BaseModel):\n    \"\"\"A single question pair for semantic search.\"\"\"\n\n    question: str = Field(description=\"The question in the question-answer pair.\")\n\n\nclass SemanticGroup(BaseModel):\n    \"\"\"A collection of question-answer pairs for semantic search.\"\"\"\n\n    items: List[SemanticCandidate] = Field(description=\"A list of questions.\")\n\n\nclass QASemanticOutput(BaseModel):\n    \"\"\"The output of the semantic search operation.\"\"\"\n\n    match_type: Literal[\"exact_match\", \"no_match\", \"similar_match\"] = Field(\n        description=(\n            \"The type of match found during the search. Use 'exact_match' if the query semantically matches the same \"\n            \"question, meaning it is asking about the exact same topic. \"\n            \"For all other cases, classify the match as 'no_match' or 'similar_match'.\"\n        )\n    )\n    items: List[SemanticCandidate] = Field(\n        description=(\n            \"The question-answer pair that matches the query. \"\n            \"If the match_type is 'no_match', return an empty list. 
\"\n            \"If the match_type is 'similar_match', return the most relevant questions.\"\n            \"If the match_type is 'exact_match', return the question that is semantically identical to the query.\"\n        )\n    )\n\n\nclass QASemanticSearchModule(dspy.Signature):\n    \"\"\"\n    This module performs a semantic search to identify the best matching question-answer pairs from a given set of candidates.\n\n    The semantic search process includes:\n    - Comparing the query against a set of candidate question-answer pairs.\n    - Returning an 'exact_match' if the query semantically matches a candidate question, meaning it is asking the exact same question.\n    - Returning a 'similar_match' if the query is related to a candidate question but does not semantically match exactly.\n    - Returning a 'no_match' if the query does not match any candidate question.\n\n    Note: The output items must be selected from the provided candidates.\n    \"\"\"\n\n    query: str = dspy.InputField(\n        description=\"The query string to search for within the candidates.\"\n    )\n    candidats: SemanticGroup = dspy.InputField(\n        description=\"A collection of frequently asked questions to search through.\"\n    )\n\n    output: QASemanticOutput = dspy.OutputField(\n        description=\"The question that best matches the query string. 
\"\n    )\n\n\nclass SemanticSearchProgram(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.prog = dspy.TypedChainOfThought(QASemanticSearchModule)\n\n    def forward(self, query: str, candidats: SemanticGroup):\n        with dspy.settings.context(lm=self.dspy_lm):\n            return self.prog(query=query, candidats=candidats)\n\n\nclass SemanticCacheManager:\n    def __init__(\n        self,\n        dspy_llm: dspy.LM,\n        embed_model: Optional[EmbedType] = None,\n        complied_sc_search_program_path: Optional[str] = None,\n    ):\n        self._dspy_lm = dspy_llm\n        if embed_model:\n            self._embed_model = resolve_embed_model(embed_model)\n        else:\n            self._embed_model = OpenAIEmbedding(\n                model=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL\n            )\n        self.prog = SemanticSearchProgram(dspy_lm=dspy_llm)\n        if complied_sc_search_program_path is not None:\n            self.prog.load(complied_sc_search_program_path)\n\n    def get_query_embedding(self, query: str):\n        return self._embed_model.get_query_embedding(query)\n\n    def add_cache(\n        self,\n        session: Session,\n        item: SemanticItem,\n        namespace: str,\n        metadata: Optional[dict] = None,\n    ):\n        if metadata is None:\n            metadata = {}\n        metadata[\"namespace\"] = namespace\n\n        object = SemanticCache(\n            query=item.question,\n            query_vec=self.get_query_embedding(item.question),\n            value=item.answer,\n            value_vec=self.get_query_embedding(item.answer),\n            meta=metadata,\n        )\n        session.add(object)\n        session.commit()\n\n    def search(\n        self, session: Session, query: str, namespace: Optional[str] = None\n    ) -> QASemanticOutput:\n        start_time = time.time()\n        embedding = self.get_query_embedding(query)\n      
  logger.debug(\n            f\"[search_semantic_cache] Get query embedding {time.time() - start_time:.2f} seconds\"\n        )\n        start_time = time.time()\n\n        sql = (\n            select(\n                SemanticCache,\n                SemanticCache.query_vec.cosine_distance(embedding).label(\"distance\"),\n            )\n            .having(SemanticCache.query_vec.cosine_distance(embedding) < 0.5)\n            .order_by(\"distance\")\n            .limit(20)\n        )\n        if namespace:\n            sql = sql.where(\n                func.json_extract(SemanticCache.meta, \"$.namespace\") == namespace\n            )\n\n        results = session.execute(sql).all()\n        candidates = SemanticGroup(\n            items=[\n                SemanticCandidate(\n                    question=result.SemanticCache.query,\n                )\n                for result in results\n            ]\n        )\n        logger.debug(\n            f\"[search_semantic_cache] Search semantic cache {time.time() - start_time:.2f} seconds\"\n        )\n        start_time = time.time()\n\n        if len(candidates.items) == 0:\n            return {\n                \"match_type\": \"no_match\",\n                \"items\": [],\n            }\n\n        pred = self.prog(query=query, candidats=candidates)\n        logger.debug(\n            f\"[search_semantic_cache] Predict semantic cache {time.time() - start_time:.2f} seconds\"\n        )\n        logger.debug(f\"[search_semantic_cache] Predict semantic cache {pred.output}\")\n\n        # filter the matched items and it's metadata\n        matched_items = []\n        for item in pred.output.items:\n            question = item.question\n            # find the matched item in the results\n            for result in results:\n                if result.SemanticCache.query == question:\n                    matched_items.append(\n                        {\n                            \"question\": result.SemanticCache.query,\n   
                         \"answer\": result.SemanticCache.value,\n                            \"meta\": result.SemanticCache.meta,\n                        }\n                    )\n                    break\n\n        return {\"match_type\": pred.output.match_type, \"items\": matched_items}\n"
  },
  {
    "path": "backend/app/rag/types.py",
    "content": "import enum\n\n\n# Langfuse needs an enum class for event types,\n# but the CBEventType in llama-index does not have sufficient types.\nclass MyCBEventType(str, enum.Enum):\n    CHUNKING = \"chunking\"\n    NODE_PARSING = \"node_parsing\"\n    EMBEDDING = \"embedding\"\n    LLM = \"llm\"\n    QUERY = \"query\"\n    RETRIEVE = \"retrieve\"\n    SYNTHESIZE = \"synthesize\"\n    TREE = \"tree\"\n    SUB_QUESTION = \"sub_question\"\n    TEMPLATING = \"templating\"\n    FUNCTION_CALL = \"function_call\"\n    RERANKING = \"reranking\"\n    EXCEPTION = \"exception\"\n    AGENT_STEP = \"agent_step\"\n    CLARIFYING_QUESTION = \"clarifying_question\"\n    CONDENSE_QUESTION = \"condense_question\"\n    REFINE_QUESTION = \"refine_question\"\n    RETRIEVE_FROM_GRAPH = \"retrieve_from_graph\"\n    INTENT_DECOMPOSITION = \"intent_decomposition\"\n    GRAPH_SEMANTIC_SEARCH = \"graph_semantic_search\"\n    SELECT_KNOWLEDGE_BASE = \"select_knowledge_base\"\n    RUN_SUB_QUERIES = \"run_sub_queries\"\n\n\n# Chat stream response event types\nclass ChatEventType(int, enum.Enum):\n    # Following vercel ai sdk's event type\n    # https://github.com/vercel/ai/blob/84871281ab5a2c080e3f8e18d02cd09c7e1691c4/packages/ui-utils/src/stream-parts.ts#L368\n    TEXT_PART = 0\n    DATA_PART = 2\n    ERROR_PART = 3\n    MESSAGE_ANNOTATIONS_PART = 8\n\n\nclass ChatMessageSate(int, enum.Enum):\n    TRACE = 0\n    SOURCE_NODES = 1\n    KG_RETRIEVAL = 2\n    REFINE_QUESTION = 3\n    SEARCH_RELATED_DOCUMENTS = 4\n    GENERATE_ANSWER = 5\n    FINISHED = 9\n"
  },
  {
    "path": "backend/app/rag/utils.py",
    "content": "import re\nfrom typing import Tuple, Dict\n\n\ndef _parse_response_format(response_format_str: str) -> Dict[str, str]:\n    \"\"\"\n    Parses the requirements string into a dictionary.\n\n    Args:\n        req_str (str): The requirements string.\n\n    Returns:\n        Dict[str, str]: A dictionary of parsed requirements.\n    \"\"\"\n    requirements = {}\n    parts = re.split(r\",\\s*(?=\\w[\\w\\s]*:\\s*[^,()]+)\", response_format_str)\n    for part in parts:\n        if \":\" in part:\n            key, value = part.split(\":\", 1)\n            requirements[key.strip()] = value.strip()\n        else:\n            requirements[part.strip()] = None\n    return requirements\n\n\ndef parse_goal_response_format(goal: str) -> Tuple[str, Dict[str, str]]:\n    \"\"\"\n    Extracts the main goal and its requirements from the input string.\n\n    Args:\n        goal (str): The input question string with optional requirements.\n\n    Returns:\n        Tuple[str, Dict[str, str]]: A tuple containing the main goal and a dictionary of requirements.\n    \"\"\"\n    # Initialize\n    clean_goal = goal.strip()\n    response_format = None\n\n    # Remove starting quote if present\n    if clean_goal.startswith('\"'):\n        clean_goal = clean_goal[1:].strip()\n\n    # Remove ending quote if present\n    if clean_goal.endswith('\"'):\n        clean_goal = clean_goal[:-1].strip()\n\n    # Function to find the last balanced parentheses by reverse traversal\n    def extract_last_parentheses(s: str) -> Tuple[str, str]:\n        \"\"\"\n        Extracts the last balanced parentheses content from the string by traversing from the end.\n\n        Args:\n            s (str): The input string.\n\n        Returns:\n            Tuple[str, str]: A tuple containing the string without the last parentheses\n                             and the content within the last parentheses.\n        \"\"\"\n        stack = []\n        last_close = s.rfind(\")\")\n        if last_close == 
-1:\n            return s, \"\"  # No closing parenthesis found\n\n        for i in range(last_close, -1, -1):\n            if s[i] == \")\":\n                stack.append(i)\n            elif s[i] == \"(\":\n                if stack:\n                    stack.pop()\n                    if not stack:\n                        # Found the matching opening parenthesis\n                        return s[:i].strip(), s[i + 1 : last_close].strip()\n        return s, \"\"  # No matching opening parenthesis found\n\n    # Extract the last parentheses content\n    clean_goal, req_str = extract_last_parentheses(clean_goal)\n\n    if req_str:\n        response_format = _parse_response_format(req_str)\n    else:\n        response_format = {}\n\n    return clean_goal, response_format\n"
  },
  {
    "path": "backend/app/repositories/__init__.py",
    "content": "# flake8: noqa\nfrom .staff_action_log import staff_action_repo\nfrom .chat_engine import chat_engine_repo\nfrom .chat import chat_repo\nfrom .document import document_repo\nfrom .chunk import ChunkRepo\nfrom .data_source import data_source_repo\nfrom .knowledge_base import knowledge_base_repo\nfrom .feedback import feedback_repo\nfrom .llm import llm_repo\nfrom .embedding_model import embedding_model_repo\n"
  },
  {
    "path": "backend/app/repositories/base_repo.py",
    "content": "from sqlmodel import Session, SQLModel, select\n\n\nclass BaseRepo:\n    model_cls: SQLModel\n\n    def get(self, session: Session, id: int):\n        return session.get(self.model_cls, id)\n\n    def get_all(self, session: Session):\n        return session.exec(select(self.model_cls)).all()\n\n    def create(self, session: Session, obj: SQLModel):\n        session.add(obj)\n        session.commit()\n        session.refresh(obj)\n        return obj\n"
  },
  {
    "path": "backend/app/repositories/chat.py",
    "content": "import enum\nfrom uuid import UUID\nfrom typing import Optional, List, Dict, Any\nfrom datetime import datetime, UTC, date, timedelta\nfrom collections import defaultdict\n\nfrom sqlmodel import select, Session, or_, func, case, desc, col\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\n\nfrom app.models import Chat, User, ChatMessage, ChatUpdate, ChatFilters, ChatOrigin\nfrom app.repositories.base_repo import BaseRepo\nfrom app.exceptions import ChatNotFound, ChatMessageNotFound\n\n\nclass ChatRepo(BaseRepo):\n    model_cls = Chat\n\n    def paginate(\n        self,\n        session: Session,\n        user: User | None,\n        browser_id: str | None,\n        filters: ChatFilters,\n        params: Params | None = Params(),\n    ) -> Page[Chat]:\n        query = select(Chat).where(Chat.deleted_at == None)\n        if user:\n            if not user.is_superuser:\n                query = query.where(\n                    or_(Chat.user_id == user.id, Chat.browser_id == browser_id)\n                )\n        else:\n            query = query.where(Chat.browser_id == browser_id, Chat.user_id == None)\n\n        # filters\n        if filters.created_at_start:\n            query = query.where(Chat.created_at >= filters.created_at_start)\n        if filters.created_at_end:\n            query = query.where(Chat.created_at <= filters.created_at_end)\n        if filters.updated_at_start:\n            query = query.where(Chat.updated_at >= filters.updated_at_start)\n        if filters.updated_at_end:\n            query = query.where(Chat.updated_at <= filters.updated_at_end)\n        if filters.chat_origin:\n            query = query.where(col(Chat.origin).contains(filters.chat_origin))\n        # if filters.user_id:\n        #     query = query.where(Chat.user_id == filters.user_id)\n        if filters.engine_id:\n            query = query.where(Chat.engine_id == filters.engine_id)\n\n        query = 
query.order_by(Chat.created_at.desc())\n        return paginate(session, query, params)\n\n    def get(\n        self,\n        session: Session,\n        chat_id: UUID,\n    ) -> Optional[Chat]:\n        return session.exec(\n            select(Chat).where(Chat.id == chat_id, Chat.deleted_at == None)\n        ).first()\n\n    def must_get(\n        self,\n        session: Session,\n        chat_id: UUID,\n    ) -> Chat:\n        chat = self.get(session, chat_id)\n        if not chat:\n            raise ChatNotFound(chat_id)\n        return chat\n\n    def update(\n        self,\n        session: Session,\n        chat: Chat,\n        chat_update: ChatUpdate,\n    ) -> Chat:\n        for field, value in chat_update.model_dump(exclude_unset=True).items():\n            if isinstance(value, enum.Enum):\n                value = value.value\n            setattr(chat, field, value)\n        session.commit()\n        session.refresh(chat)\n        return chat\n\n    def delete(self, session: Session, chat: Chat):\n        chat.deleted_at = datetime.now(UTC)\n        session.add(chat)\n        session.commit()\n\n    def get_last_message(self, session: Session, chat: Chat) -> Optional[ChatMessage]:\n        return session.exec(\n            select(ChatMessage)\n            .where(ChatMessage.chat_id == chat.id)\n            .order_by(ChatMessage.ordinal.desc())\n        ).first()\n\n    def get_messages(\n        self,\n        session: Session,\n        chat: Chat,\n    ) -> List[ChatMessage]:\n        return session.exec(\n            select(ChatMessage)\n            .where(ChatMessage.chat_id == chat.id)\n            .order_by(ChatMessage.ordinal.asc())\n        ).all()\n\n    def get_message(\n        self,\n        session: Session,\n        chat_message_id: int,\n    ) -> Optional[ChatMessage]:\n        return session.exec(\n            select(ChatMessage).where(\n                ChatMessage.id == chat_message_id,\n                ChatMessage.chat.has(Chat.deleted_at 
== None),\n            )\n        ).first()\n\n    def must_get_message(\n        self,\n        session: Session,\n        chat_message_id: int,\n    ):\n        msg = self.get_message(session, chat_message_id)\n        if not msg:\n            raise ChatMessageNotFound(chat_message_id)\n        return msg\n\n    def create_message(\n        self,\n        session: Session,\n        chat: Chat,\n        chat_message: ChatMessage,\n    ) -> ChatMessage:\n        if not chat_message.ordinal:\n            last_message = self.get_last_message(session, chat)\n            if last_message:\n                ordinal = last_message.ordinal + 1\n            else:\n                ordinal = 1\n            chat_message.ordinal = ordinal\n        chat_message.chat_id = chat.id\n        chat_message.user_id = chat.user_id\n        session.add(chat_message)\n        session.commit()\n        session.refresh(chat_message)\n        return chat_message\n\n    def find_recent_assistant_messages_by_goal(\n        self, session: Session, metadata: Dict[str, Any], days: int = 15\n    ) -> List[ChatMessage]:\n        \"\"\"\n        Search for 'assistant' role chat messages with a specific goal within the recent days.\n\n        Args:\n            session (Session): The database session.\n            metadata (Dict[str, Any]): Key-value pairs that must match fields in the message meta JSON.\n            days (int, optional): Number of recent days to include in the search. 
Defaults to 15.\n\n        Returns:\n            List[ChatMessage]: A list of ChatMessage instances that match the criteria.\n        \"\"\"\n        # Calculate the cutoff datetime based on the current UTC time minus the specified number of days\n        cutoff = datetime.now(UTC) - timedelta(days=days)\n\n        query = select(ChatMessage).where(\n            ChatMessage.role == \"assistant\",\n            ChatMessage.created_at >= cutoff,\n            ChatMessage.is_best_answer.is_(True),  # Use is_ for boolean fields\n        )\n\n        # Dynamically add filters for each key-value pair in metadata\n        for key, value in metadata.items():\n            json_path = f\"$.{key}\"\n            filter_condition = (\n                func.JSON_UNQUOTE(func.JSON_EXTRACT(ChatMessage.meta, json_path))\n                == value\n            )\n            query = query.where(filter_condition)\n\n        # Order by created_at in descending order\n        query = query.order_by(desc(ChatMessage.created_at))\n\n        return session.exec(query).all()\n\n    def find_best_answer_for_question(\n        self, session: Session, user_question: str\n    ) -> List[ChatMessage]:\n        \"\"\"Find best answer messages for a specific user question.\n\n        This method finds assistant messages that:\n        1. Are marked as best answers\n        2. Are responses (ordinal=2) to the exact user question\n        3. 
Were created within the last 15 days\n\n        Args:\n            session: Database session\n            user_question: The exact question text to search for\n\n        Returns:\n            List of matching assistant messages marked as best answers\n        \"\"\"\n        cutoff = datetime.now(UTC) - timedelta(days=15)\n\n        # First, get all best answers from assistant (using the is_best_answer index)\n        best_answer_chat_ids = select(ChatMessage.chat_id).where(\n            ChatMessage.is_best_answer == 1,  # Using the index for efficiency\n            ChatMessage.role == \"assistant\",\n            ChatMessage.ordinal == 2,\n            ChatMessage.created_at >= cutoff,\n        )\n\n        # Then, find user questions that match our target question and belong to chats with best answers\n        matching_chat_ids = select(ChatMessage.chat_id).where(\n            ChatMessage.chat_id.in_(best_answer_chat_ids),\n            ChatMessage.role == \"user\",\n            ChatMessage.ordinal == 1,\n            ChatMessage.content == user_question.strip(),\n        )\n\n        # Finally, get the best answers that correspond to the matching user questions\n        query = select(ChatMessage).where(\n            ChatMessage.is_best_answer == 1,\n            ChatMessage.role == \"assistant\",\n            ChatMessage.ordinal == 2,\n            ChatMessage.chat_id.in_(matching_chat_ids),\n        )\n\n        query = query.order_by(desc(ChatMessage.created_at))\n\n        # Execute the query and return all results\n        return session.exec(query).all()\n\n    def chat_trend_by_user(\n        self, session: Session, start_date: date, end_date: date\n    ) -> List[dict]:\n        start_at = datetime.combine(start_date, datetime.min.time(), UTC)\n        end_at = datetime.combine(end_date, datetime.max.time(), UTC)\n        query = (\n            select(\n                func.date(Chat.created_at).label(\"date\"),\n                
func.sum(case((Chat.user_id.isnot(None), 1), else_=0)).label(\"user\"),\n                func.sum(case((Chat.user_id.is_(None), 1), else_=0)).label(\"anonymous\"),\n            )\n            .where(Chat.created_at.between(start_at, end_at))\n            .group_by(func.date(Chat.created_at))\n            .order_by(func.date(Chat.created_at))\n        )\n        result = session.exec(query)\n        return [\n            {\"date\": row.date, \"user\": int(row.user), \"anonymous\": int(row.anonymous)}\n            for row in result\n        ]\n\n    def chat_trend_by_origin(\n        self, session: Session, start_date: date, end_date: date\n    ) -> List[dict]:\n        start_at = datetime.combine(start_date, datetime.min.time(), UTC)\n        end_at = datetime.combine(end_date, datetime.max.time(), UTC)\n        query = (\n            select(\n                func.count(Chat.id).label(\"count\"),\n                func.date(Chat.created_at).label(\"date\"),\n                Chat.origin,\n            )\n            .where(Chat.created_at.between(start_at, end_at))\n            .group_by(func.date(Chat.created_at), Chat.origin)\n            .order_by(func.date(Chat.created_at))\n        )\n        result = session.exec(query)\n\n        date_origin_counts = defaultdict(lambda: defaultdict(int))\n        origins = set()\n\n        for row in result:\n            date_origin_counts[row.date][row.origin] = row.count\n            origins.add(row.origin)\n\n        stats = []\n        for d, origin_counts in date_origin_counts.items():\n            stat = {\"date\": d}\n            for origin in origins:\n                stat[origin] = origin_counts[origin]\n            stats.append(stat)\n\n        stats.sort(key=lambda x: x[\"date\"])\n        return stats\n\n    def list_chat_origins(\n        self,\n        db_session: Session,\n        search: Optional[str] = None,\n        params: Params = Params(),\n    ) -> Page[ChatOrigin]:\n        query = (\n            
select(Chat.origin, func.count(Chat.id).label(\"chats\"))\n            .where(Chat.deleted_at == None)\n            .where(Chat.origin != None)\n            .where(Chat.origin != \"\")\n        )\n\n        if search:\n            query = query.where(Chat.origin.ilike(f\"%{search}%\"))\n\n        query = query.group_by(Chat.origin).order_by(desc(\"chats\"))\n\n        return paginate(\n            db_session,\n            query,\n            params,\n            transformer=lambda chats: [\n                ChatOrigin(origin=chat.origin, chats=chat.chats) for chat in chats\n            ],\n        )\n\n\nchat_repo = ChatRepo()\n"
  },
  {
    "path": "backend/app/repositories/chat_engine.py",
    "content": "from typing import Optional\nfrom datetime import datetime, UTC\n\nfrom sqlalchemy import func\nfrom sqlmodel import select, Session, update\nfrom app.exceptions import ChatEngineNotFound\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlalchemy.orm.attributes import flag_modified\n\nfrom app.models.chat_engine import ChatEngine, ChatEngineUpdate\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass ChatEngineRepo(BaseRepo):\n    model_cls = ChatEngine\n\n    def get(\n        self, session: Session, id: int, need_public: bool = False\n    ) -> Optional[ChatEngine]:\n        query = select(ChatEngine).where(\n            ChatEngine.id == id, ChatEngine.deleted_at == None\n        )\n        if need_public:\n            query = query.where(ChatEngine.is_public == True)\n        return session.exec(query).first()\n\n    def must_get(\n        self, session: Session, id: int, need_public: bool = False\n    ) -> ChatEngine:\n        chat_engine = self.get(session, id, need_public)\n        if chat_engine is None:\n            raise ChatEngineNotFound(id)\n        return chat_engine\n\n    def paginate(\n        self,\n        session: Session,\n        params: Params | None = Params(),\n        need_public: bool = False,\n    ) -> Page[ChatEngine]:\n        query = select(ChatEngine).where(ChatEngine.deleted_at == None)\n        if need_public:\n            query = query.where(ChatEngine.is_public == True)\n        # Make sure the default engine is always on top\n        query = query.order_by(ChatEngine.is_default.desc(), ChatEngine.name)\n        return paginate(session, query, params)\n\n    def get_default_engine(self, session: Session) -> Optional[ChatEngine]:\n        return session.exec(\n            select(ChatEngine).where(\n                ChatEngine.is_default == True, ChatEngine.deleted_at == None\n            )\n        ).first()\n\n    def has_default(self, session: Session) 
-> bool:\n        return (\n            session.scalar(\n                select(func.count(ChatEngine.id)).where(\n                    ChatEngine.is_default == True, ChatEngine.deleted_at == None\n                )\n            )\n            > 0\n        )\n\n    def get_engine_by_name(self, session: Session, name: str) -> Optional[ChatEngine]:\n        return session.exec(\n            select(ChatEngine).where(\n                ChatEngine.name == name, ChatEngine.deleted_at == None\n            )\n        ).first()\n\n    def create(self, session: Session, obj: ChatEngine):\n        if obj.is_default:\n            session.exec(\n                update(ChatEngine)\n                .where(ChatEngine.id != obj.id)\n                .values(is_default=False)\n            )\n        session.add(obj)\n        session.commit()\n        session.refresh(obj)\n        return obj\n\n    def update(\n        self,\n        session: Session,\n        chat_engine: ChatEngine,\n        chat_engine_update: ChatEngineUpdate,\n    ) -> ChatEngine:\n        set_default = chat_engine_update.is_default\n        for field, value in chat_engine_update.model_dump(exclude_unset=True).items():\n            setattr(chat_engine, field, value)\n            flag_modified(chat_engine, field)\n\n        if set_default:\n            session.exec(\n                update(ChatEngine)\n                .where(ChatEngine.id != chat_engine.id)\n                .values(is_default=False)\n            )\n        session.commit()\n        session.refresh(chat_engine)\n        return chat_engine\n\n    def delete(self, session: Session, chat_engine: ChatEngine) -> ChatEngine:\n        chat_engine.deleted_at = datetime.now(UTC)\n        session.commit()\n        session.refresh(chat_engine)\n        return chat_engine\n\n\nchat_engine_repo = ChatEngineRepo()\n"
  },
  {
    "path": "backend/app/repositories/chunk.py",
    "content": "from typing import Type\n\nfrom sqlalchemy import func, delete\nfrom sqlmodel import Session, select, SQLModel\nfrom app.repositories.base_repo import BaseRepo\n\nfrom app.models import (\n    Document as DBDocument,\n)\n\n\nclass ChunkRepo(BaseRepo):\n    def __init__(self, chunk_model: Type[SQLModel]):\n        self.model_cls = chunk_model\n\n    def document_exists_chunks(self, session: Session, document_id: int) -> bool:\n        return (\n            session.exec(\n                select(self.model_cls).where(self.model_cls.document_id == document_id)\n            ).first()\n            is not None\n        )\n\n    def get_documents_by_chunk_ids(\n        self, session: Session, chunk_ids: list[str]\n    ) -> list[DBDocument]:\n        stmt = select(DBDocument).where(\n            DBDocument.id.in_(\n                select(self.model_cls.document_id).where(\n                    self.model_cls.id.in_(chunk_ids),\n                )\n            ),\n        )\n        return list(session.exec(stmt).all())\n\n    def get_document_chunks(self, session: Session, document_id: int):\n        return session.exec(\n            select(self.model_cls).where(self.model_cls.document_id == document_id)\n        ).all()\n\n    def fetch_by_document_ids(self, session: Session, document_ids: list[int]):\n        return session.exec(\n            select(self.model_cls).where(self.model_cls.document_id.in_(document_ids))\n        ).all()\n\n    def count(self, session: Session):\n        return session.scalar(select(func.count(self.model_cls.id)))\n\n    def delete_by_datasource(self, session: Session, datasource_id: int):\n        doc_ids_subquery = select(DBDocument.id).where(\n            DBDocument.data_source_id == datasource_id\n        )\n        stmt = delete(self.model_cls).where(\n            self.model_cls.document_id.in_(doc_ids_subquery)\n        )\n        session.exec(stmt)\n\n    def delete_by_document(self, session: Session, document_id: int):\n  
      stmt = delete(self.model_cls).where(self.model_cls.document_id == document_id)\n        session.exec(stmt)\n"
  },
  {
    "path": "backend/app/repositories/data_source.py",
    "content": "from typing import Optional\nfrom datetime import datetime, UTC\n\nfrom sqlmodel import select, Session\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\n\nfrom app.models import (\n    DataSource,\n)\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass DataSourceRepo(BaseRepo):\n    model_cls = DataSource\n\n    def paginate(\n        self,\n        session: Session,\n        params: Params | None = Params(),\n    ) -> Page[DataSource]:\n        query = (\n            select(DataSource)\n            .where(DataSource.deleted_at == None)\n            .order_by(DataSource.created_at.desc())\n        )\n        return paginate(session, query, params)\n\n    def get(\n        self,\n        session: Session,\n        data_source_id: int,\n    ) -> Optional[DataSource]:\n        return session.exec(\n            select(DataSource).where(\n                DataSource.id == data_source_id, DataSource.deleted_at == None\n            )\n        ).first()\n\n    def delete(self, session: Session, data_source: DataSource) -> None:\n        data_source.deleted_at = datetime.now(UTC)\n        session.add(data_source)\n        session.commit()\n\n\ndata_source_repo = DataSourceRepo()\n"
  },
  {
    "path": "backend/app/repositories/document.py",
    "content": "from typing import Type\n\nfrom sqlmodel import select, Session, or_, delete\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\n\nfrom app.api.admin_routes.knowledge_base.document.models import DocumentFilters\nfrom app.exceptions import DocumentNotFound\nfrom app.models import Document\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass DocumentRepo(BaseRepo):\n    model_cls = Document\n\n    def paginate(\n        self,\n        session: Session,\n        filters: DocumentFilters,\n        params: Params | None = Params(),\n    ) -> Page[Document]:\n        # build the select statement via conditions\n        stmt = select(Document)\n        if filters.knowledge_base_id:\n            stmt = stmt.where(Document.knowledge_base_id == filters.knowledge_base_id)\n        if filters.search:\n            stmt = stmt.where(\n                or_(\n                    Document.name.contains(filters.search),\n                    Document.source_uri.contains(filters.search),\n                )\n            )\n        if filters.data_source_id:\n            stmt = stmt.where(Document.data_source_id == filters.data_source_id)\n        if filters.created_at:\n            start_time, end_time = filters.created_at\n            if start_time:\n                stmt = stmt.where(Document.created_at >= start_time)\n            if end_time:\n                stmt = stmt.where(Document.created_at <= end_time)\n        if filters.updated_at:\n            start_time, end_time = filters.updated_at\n            if start_time:\n                stmt = stmt.where(Document.updated_at >= start_time)\n            if end_time:\n                stmt = stmt.where(Document.updated_at <= end_time)\n        if filters.last_modified_at:\n            start_time, end_time = filters.last_modified_at\n            if start_time:\n                stmt = stmt.where(Document.last_modified_at >= start_time)\n            if end_time:\n       
         stmt = stmt.where(Document.last_modified_at <= end_time)\n        if filters.mime_type:\n            stmt = stmt.where(Document.mime_type == filters.mime_type)\n        if filters.index_status:\n            stmt = stmt.where(Document.index_status == filters.index_status)\n\n        # Make sure the newer edited record is always on top\n        stmt = stmt.order_by(Document.updated_at.desc())\n\n        return paginate(session, stmt, params)\n\n    def must_get(self, session: Session, doc_id: int) -> Type[Document]:\n        doc = session.get(Document, doc_id)\n        if not doc:\n            raise DocumentNotFound(doc_id)\n        return doc\n\n    def delete_by_datasource(self, session: Session, datasource_id: int):\n        stmt = delete(Document).where(Document.data_source_id == datasource_id)\n        session.exec(stmt)\n\n    def fetch_by_ids(self, session: Session, document_ids: list[int]) -> list[Document]:\n        stmt = select(Document).where(Document.id.in_(document_ids))\n        return session.exec(stmt).all()\n\n\ndocument_repo = DocumentRepo()\n"
  },
  {
    "path": "backend/app/repositories/embedding_model.py",
    "content": "from typing import Optional, Type\n\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlalchemy.orm.attributes import flag_modified\nfrom sqlmodel import Session, select, update\n\nfrom app.api.admin_routes.embedding_model.models import (\n    EmbeddingModelUpdate,\n    EmbeddingModelCreate,\n)\nfrom app.exceptions import DefaultEmbeddingModelNotFound, EmbeddingModelNotFound\nfrom app.models import EmbeddingModel\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass EmbeddingModelRepo(BaseRepo):\n    model_cls = EmbeddingModel\n\n    def paginate(\n        self, session: Session, params: Params | None = Params()\n    ) -> Page[EmbeddingModel]:\n        query = select(EmbeddingModel)\n        # Make sure the default model is always on top.\n        query = query.order_by(\n            EmbeddingModel.is_default.desc(), EmbeddingModel.created_at.desc()\n        )\n        return paginate(session, query, params)\n\n    def get(self, session: Session, model_id: int) -> Optional[EmbeddingModel]:\n        return session.get(EmbeddingModel, model_id)\n\n    def must_get(self, session: Session, model_id: int) -> Type[EmbeddingModel]:\n        db_embed_model = self.get(session, model_id)\n        if db_embed_model is None:\n            raise EmbeddingModelNotFound(model_id)\n        return db_embed_model\n\n    def exists_any_model(self, session: Session) -> bool:\n        stmt = select(EmbeddingModel).with_for_update().limit(1)\n        return session.exec(stmt).one_or_none() is not None\n\n    def create(self, session: Session, create: EmbeddingModelCreate):\n        # If there is currently no model, the first model will be\n        # set as the default model.\n        if not self.exists_any_model(session):\n            create.is_default = True\n\n        if create.is_default:\n            self._unset_default(session)\n\n        embed_model = 
EmbeddingModel(\n            name=create.name,\n            provider=create.provider,\n            model=create.model,\n            vector_dimension=create.vector_dimension,\n            config=create.config,\n            credentials=create.credentials,\n            is_default=create.is_default,\n        )\n        session.add(embed_model)\n        session.commit()\n        session.refresh(embed_model)\n\n        return embed_model\n\n    def update(\n        self,\n        session: Session,\n        embed_model: EmbeddingModel,\n        partial_update: EmbeddingModelUpdate,\n    ) -> EmbeddingModel:\n        for field, value in partial_update.model_dump(exclude_unset=True).items():\n            setattr(embed_model, field, value)\n            flag_modified(embed_model, field)\n\n        session.commit()\n        session.refresh(embed_model)\n        return embed_model\n\n    def delete(self, session: Session, model: EmbeddingModel):\n        # TODO: Support to specify a new embedding model to replace the current embedding model.\n        session.exec(\n            update(KnowledgeBase)\n            .where(KnowledgeBase.embedding_model_id == model.id)\n            .values(embedding_model_id=None)\n        )\n\n        session.delete(model)\n        session.commit()\n\n    # Default model\n\n    def get_default(self, session: Session) -> Type[EmbeddingModel]:\n        stmt = select(EmbeddingModel).where(EmbeddingModel.is_default == True).limit(1)\n        return session.exec(stmt).first()\n\n    def has_default(self, session: Session) -> bool:\n        return self.get_default(session) is not None\n\n    def must_get_default(self, session: Session) -> Type[EmbeddingModel]:\n        embed_model = self.get_default(session)\n        if embed_model is None:\n            raise DefaultEmbeddingModelNotFound()\n        return embed_model\n\n    def _unset_default(self, session: Session):\n        session.exec(\n            update(EmbeddingModel)\n            
.values(is_default=False)\n            .where(EmbeddingModel.is_default == True)\n        )\n\n    def set_default(self, session: Session, model: EmbeddingModel):\n        self._unset_default(session)\n        model.is_default = True\n        flag_modified(model, \"is_default\")\n        session.commit()\n        session.refresh(model)\n        return model\n\n\nembedding_model_repo = EmbeddingModelRepo()\n"
  },
  {
    "path": "backend/app/repositories/feedback.py",
    "content": "from sqlmodel import select, Session, col, func, desc\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom typing import Optional\n\nfrom app.models import Feedback, AdminFeedbackPublic, FeedbackFilters\nfrom app.models.feedback import FeedbackOrigin\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass FeedbackRepo(BaseRepo):\n    model_cls = Feedback\n\n    def paginate(\n        self,\n        session: Session,\n        filters: FeedbackFilters,\n        params: Params | None = Params(),\n    ) -> Page[AdminFeedbackPublic]:\n        # build the select statement via conditions\n        stmt = select(Feedback)\n        if filters.created_at_start:\n            stmt = stmt.where(Feedback.created_at >= filters.created_at_start)\n        if filters.created_at_end:\n            stmt = stmt.where(Feedback.created_at <= filters.created_at_end)\n        if filters.feedback_origin:\n            stmt = stmt.where(col(Feedback.origin).contains(filters.feedback_origin))\n        if filters.chat_id:\n            stmt = stmt.where(Feedback.chat_id == filters.chat_id)\n        if filters.feedback_type:\n            stmt = stmt.where(Feedback.feedback_type == filters.feedback_type)\n        if filters.user_id:\n            stmt = stmt.where(Feedback.user_id == filters.user_id)\n\n        stmt = stmt.order_by(Feedback.created_at.desc())\n        return paginate(\n            session,\n            stmt,\n            params,\n            transformer=lambda items: [\n                AdminFeedbackPublic(\n                    **item.model_dump(),\n                    chat_title=item.chat.title,\n                    chat_origin=item.chat.origin,\n                    chat_message_content=item.chat_message.content,\n                    user_email=item.user.email if item.user else None,\n                )\n                for item in items\n            ],\n        )\n\n    def list_feedback_origins(\n        self,\n 
       session: Session,\n        search: Optional[str] = None,\n        params: Params | None = Params(),\n    ) -> Page[FeedbackOrigin]:\n        query = select(\n            Feedback.origin, func.count(Feedback.id).label(\"feedbacks\")\n        ).group_by(Feedback.origin)\n\n        if search:\n            query = query.where(Feedback.origin.ilike(f\"%{search}%\"))\n\n        query = query.order_by(desc(\"feedbacks\"))\n\n        return paginate(\n            session,\n            query,\n            params,\n            transformer=lambda items: [\n                FeedbackOrigin(origin=item[0], feedbacks=item[1]) for item in items\n            ],\n        )\n\n\nfeedback_repo = FeedbackRepo()\n"
  },
  {
    "path": "backend/app/repositories/graph.py",
    "content": "from typing import Type\n\nfrom sqlmodel import Session, select, func, delete, SQLModel\n\nfrom app.models.document import Document\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.models.entity import get_kb_entity_model\nfrom app.models.relationship import get_kb_relationship_model\n\n\nclass GraphRepo:\n    def __init__(\n        self,\n        entity_model: Type[SQLModel],\n        relationship_model: Type[SQLModel],\n        chunk_model: Type[SQLModel],\n    ):\n        self.entity_model = entity_model\n        self.relationship_model = relationship_model\n        self.chunk_model = chunk_model\n\n    def count_entities(self, session: Session):\n        return session.scalar(select(func.count(self.entity_model.id)))\n\n    def count_relationships(self, session: Session):\n        return session.scalar(select(func.count(self.relationship_model.id)))\n\n    def delete_orphaned_entities(self, session: Session):\n        orphaned_entity_ids = (\n            select(self.entity_model.id)\n            .outerjoin(\n                self.relationship_model,\n                (self.relationship_model.target_entity_id == self.entity_model.id)\n                | (self.relationship_model.source_entity_id == self.entity_model.id),\n            )\n            .where(self.relationship_model.id.is_(None))\n            .scalar_subquery()\n        )\n        stmt = delete(self.entity_model).where(\n            self.entity_model.id.in_(orphaned_entity_ids)\n        )\n        session.exec(stmt)\n\n    def delete_data_source_relationships(self, session: Session, datasource_id: int):\n        doc_ids_subquery = select(Document.id).where(\n            Document.data_source_id == datasource_id\n        )\n        chunk_ids_subquery = select(self.chunk_model.id).where(\n            self.chunk_model.document_id.in_(doc_ids_subquery)\n        )\n        stmt = delete(self.relationship_model).where(\n            
self.relationship_model.chunk_id.in_(chunk_ids_subquery)\n        )\n        session.exec(stmt)\n\n    def delete_document_relationships(self, session: Session, document_id: int):\n        chunk_ids_subquery = select(self.chunk_model.id).where(\n            self.chunk_model.document_id == document_id\n        )\n        stmt = delete(self.relationship_model).where(\n            self.relationship_model.chunk_id.in_(chunk_ids_subquery)\n        )\n        session.exec(stmt)\n\n\ndef get_kb_graph_repo(kb: KnowledgeBase) -> GraphRepo:\n    chunk_model = get_kb_chunk_model(kb)\n    entity_model = get_kb_entity_model(kb)\n    relationship_model = get_kb_relationship_model(kb)\n    return GraphRepo(entity_model, relationship_model, chunk_model)\n"
  },
  {
    "path": "backend/app/repositories/knowledge_base.py",
    "content": "from typing import List, Optional, Type\nfrom datetime import datetime, UTC\n\nfrom sqlalchemy import delete\nfrom sqlalchemy.orm.attributes import flag_modified\nfrom sqlmodel import SQLModel, select, Session, func, update\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\n\nfrom app.api.admin_routes.knowledge_base.models import (\n    VectorIndexError,\n    KGIndexError,\n    KnowledgeBaseUpdate,\n)\nfrom app.exceptions import KBDataSourceNotFound, KBNotFound\nfrom app.models import (\n    KnowledgeBase,\n    Document,\n    DocIndexTaskStatus,\n    KgIndexStatus,\n    KnowledgeBaseDataSource,\n)\nfrom app.models.chat_engine import ChatEngine\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.models.data_source import DataSource\nfrom app.models.knowledge_base import IndexMethod\nfrom app.repositories.base_repo import BaseRepo\nfrom app.repositories.chunk import ChunkRepo\nfrom app.repositories.graph import get_kb_graph_repo\n\n\nclass KnowledgeBaseRepo(BaseRepo):\n    model_cls = KnowledgeBase\n\n    def paginate(\n        self, session: Session, params: Params | None = Params()\n    ) -> Page[KnowledgeBase]:\n        query = (\n            select(KnowledgeBase)\n            .where(KnowledgeBase.deleted_at == None)\n            .order_by(KnowledgeBase.created_at.desc())\n        )\n        return paginate(session, query, params)\n\n    def get(\n        self, session: Session, knowledge_base_id: int, show_soft_deleted: bool = True\n    ) -> Optional[KnowledgeBase]:\n        stmt = select(KnowledgeBase).where(KnowledgeBase.id == knowledge_base_id)\n\n        if not show_soft_deleted:\n            stmt = stmt.where(KnowledgeBase.deleted_at == None)\n\n        return session.exec(stmt).first()\n\n    def must_get(\n        self, session: Session, knowledge_base_id: int, show_soft_deleted: bool = True\n    ) -> Optional[KnowledgeBase]:\n        kb = self.get(session, knowledge_base_id, 
show_soft_deleted)\n        if kb is None:\n            raise KBNotFound(knowledge_base_id)\n        return kb\n\n    def get_by_ids(\n        self, session: Session, knowledge_base_ids: List[int]\n    ) -> List[KnowledgeBase]:\n        return session.exec(\n            select(KnowledgeBase).where(KnowledgeBase.id.in_(knowledge_base_ids))\n        ).all()\n\n    def update(\n        self,\n        session: Session,\n        knowledge_base: KnowledgeBase,\n        partial_update: KnowledgeBaseUpdate,\n    ) -> KnowledgeBase:\n        for field, value in partial_update.model_dump(exclude_unset=True).items():\n            setattr(knowledge_base, field, value)\n            flag_modified(knowledge_base, field)\n\n        session.commit()\n        session.refresh(knowledge_base)\n        return knowledge_base\n\n    def delete(self, session: Session, knowledge_base: KnowledgeBase) -> None:\n        knowledge_base.deleted_at = datetime.now(UTC)\n        session.add(knowledge_base)\n        session.commit()\n\n    def get_index_overview(self, session: Session, kb: KnowledgeBase) -> dict:\n        # TODO: store and query the count numbers in the knowledge base table.\n        documents_total = self.count_documents(session, kb)\n        chunks_total = self.count_chunks(session, kb)\n        overview_data = {\n            \"documents\": {\"total\": documents_total},\n            \"chunks\": {\"total\": chunks_total},\n        }\n\n        if IndexMethod.VECTOR in kb.index_methods:\n            overview_data.update(\n                self.count_documents_by_vector_index_status(session, kb)\n            )\n\n        if IndexMethod.KNOWLEDGE_GRAPH in kb.index_methods:\n            relationships_total = self.count_relationships(session, kb)\n            entities_total = self.count_entities(session, kb)\n            overview_data.update(\n                {\n                    \"entities\": {\"total\": entities_total},\n                    \"relationships\": {\"total\": 
relationships_total},\n                }\n            )\n            overview_data.update(self.count_chunks_by_kg_index_status(session, kb))\n\n        return overview_data\n\n    def count_data_sources(self, session: Session, kb: KnowledgeBase) -> int:\n        return session.scalar(\n            select(func.count(KnowledgeBaseDataSource.data_source_id)).where(\n                KnowledgeBaseDataSource.knowledge_base_id == kb.id\n            )\n        )\n\n    def count_documents(self, session: Session, kb: KnowledgeBase) -> int:\n        return session.scalar(\n            select(func.count(Document.id)).where(Document.knowledge_base_id == kb.id)\n        )\n\n    def count_chunks(self, session: Session, kb: KnowledgeBase):\n        chunk_repo = ChunkRepo(get_kb_chunk_model(kb))\n        return chunk_repo.count(session)\n\n    def count_relationships(self, session: Session, kb: KnowledgeBase):\n        graph_repo = get_kb_graph_repo(kb)\n        return graph_repo.count_relationships(session)\n\n    def count_entities(self, session: Session, kb: KnowledgeBase):\n        graph_repo = get_kb_graph_repo(kb)\n        return graph_repo.count_entities(session)\n\n    def count_documents_by_vector_index_status(\n        self, session: Session, kb: KnowledgeBase\n    ) -> dict:\n        stmt = (\n            select(Document.index_status, func.count(Document.id))\n            .where(Document.knowledge_base_id == kb.id)\n            .group_by(Document.index_status)\n            .order_by(Document.index_status)\n        )\n        results = session.exec(stmt).all()\n        vector_index_status = {s: c for s, c in results}\n\n        return {\n            \"vector_index\": vector_index_status,\n        }\n\n    def count_chunks_by_kg_index_status(\n        self, session: Session, kb: KnowledgeBase\n    ) -> dict:\n        # FIXME: Maybe we should count the documents (instead of chunks) like vector index?\n        chunk_model = get_kb_chunk_model(kb)\n        stmt = (\n        
    select(chunk_model.index_status, func.count(chunk_model.id))\n            .where(chunk_model.document.has(Document.knowledge_base_id == kb.id))\n            .group_by(chunk_model.index_status)\n            .order_by(chunk_model.index_status)\n        )\n        results = session.exec(stmt).all()\n        kg_index_status = {s: c for s, c in results}\n\n        return {\"kg_index\": kg_index_status}\n\n    def batch_update_document_status(\n        self, session: Session, document_ids: list[int], status: DocIndexTaskStatus\n    ):\n        stmt = (\n            update(Document)\n            .where(Document.id.in_(document_ids))\n            .values(index_status=status)\n        )\n        session.exec(stmt)\n        session.commit()\n\n    def set_failed_documents_status_to_pending(\n        self, session: Session, kb: KnowledgeBase\n    ) -> list[int]:\n        stmt = select(Document.id).where(\n            Document.knowledge_base_id == kb.id,\n            Document.index_status == DocIndexTaskStatus.FAILED,\n        )\n        failed_document_ids = session.exec(stmt).all()\n        self.batch_update_document_status(\n            session, failed_document_ids, DocIndexTaskStatus.PENDING\n        )\n        return failed_document_ids\n\n    def batch_update_chunk_status(\n        self,\n        session: Session,\n        chunk_model: Type[SQLModel],\n        chunk_ids: list[int],\n        status: KgIndexStatus,\n    ):\n        stmt = (\n            update(chunk_model)\n            .where(chunk_model.id.in_(chunk_ids))\n            .values(index_status=status)\n        )\n        session.exec(stmt)\n        session.commit()\n\n    def set_failed_chunks_status_to_pending(\n        self, session: Session, kb: KnowledgeBase\n    ) -> list[int]:\n        chunk_model = get_kb_chunk_model(kb)\n        stmt = select(chunk_model.id).where(\n            chunk_model.document.has(Document.knowledge_base_id == kb.id),\n            chunk_model.index_status == 
KgIndexStatus.FAILED,\n        )\n        chunk_ids = session.exec(stmt).all()\n\n        # Update status.\n        self.batch_update_chunk_status(\n            session, chunk_model, chunk_ids, KgIndexStatus.PENDING\n        )\n\n        return chunk_ids\n\n    def list_vector_index_built_errors(\n        self,\n        session: Session,\n        kb: KnowledgeBase,\n        params: Params | None = Params(),\n    ) -> Page[VectorIndexError]:\n        query = (\n            select(\n                Document.id,\n                Document.name,\n                Document.source_uri,\n                Document.index_result,\n            )\n            .where(\n                Document.knowledge_base_id == kb.id,\n                Document.index_status == DocIndexTaskStatus.FAILED,\n            )\n            .order_by(Document.id.desc())\n        )\n\n        return paginate(\n            session,\n            query,\n            params,\n            transformer=lambda rows: [\n                VectorIndexError(\n                    document_id=row[0],\n                    document_name=row[1],\n                    source_uri=row[2],\n                    error=row[3],\n                )\n                for row in rows\n            ],\n        )\n\n    def list_kg_index_built_errors(\n        self,\n        session: Session,\n        kb: KnowledgeBase,\n        params: Params | None = Params(),\n    ) -> Page[KGIndexError]:\n        chunk_model = get_kb_chunk_model(kb)\n        query = (\n            select(\n                Document.id,\n                Document.name,\n                chunk_model.source_uri,\n                chunk_model.id,\n                chunk_model.index_result,\n            )\n            .join(Document)\n            .where(\n                chunk_model.document_id == Document.id,\n                Document.knowledge_base_id == kb.id,\n                chunk_model.index_status == KgIndexStatus.FAILED,\n            )\n            
.order_by(chunk_model.id.desc())\n        )\n\n        return paginate(\n            session,\n            query,\n            params,\n            transformer=lambda rows: [\n                KGIndexError(\n                    document_id=row[0],\n                    document_name=row[1],\n                    source_uri=row[2],\n                    chunk_id=row[3],\n                    error=row[4],\n                )\n                for row in rows\n            ],\n        )\n\n    def get_kb_datasource(\n        self,\n        session: Session,\n        kb: KnowledgeBase,\n        datasource_id: int,\n        show_soft_deleted: bool = False,\n    ) -> DataSource:\n        stmt = select(DataSource).where(DataSource.id == datasource_id)\n        if not show_soft_deleted:\n            stmt = stmt.where(DataSource.deleted_at == None)\n        return session.exec(stmt).first()\n\n    def must_get_kb_datasource(\n        self,\n        session: Session,\n        kb: KnowledgeBase,\n        datasource_id: int,\n        show_soft_deleted: bool = False,\n    ) -> DataSource:\n        data_source = self.get_kb_datasource(\n            session, kb, datasource_id, show_soft_deleted\n        )\n        if data_source is None:\n            raise KBDataSourceNotFound(kb.id, datasource_id)\n        return data_source\n\n    def add_kb_datasource(\n        self, session: Session, kb: KnowledgeBase, data_source: DataSource\n    ) -> DataSource:\n        session.add(data_source)\n        kb.data_sources.append(data_source)\n\n        session.add(kb)\n        session.commit()\n        session.refresh(data_source)\n\n        return data_source\n\n    def list_kb_datasources(\n        self, session: Session, kb_id: int, params: Params | None = Params()\n    ) -> Page[DataSource]:\n        query = (\n            select(DataSource)\n            .join(KnowledgeBaseDataSource)\n            .where(\n                DataSource.deleted_at == None,\n                
KnowledgeBaseDataSource.knowledge_base_id == kb_id,\n            )\n            .order_by(DataSource.created_at.desc())\n        )\n        return paginate(session, query, params)\n\n    def remove_kb_datasource(\n        self, session: Session, kb: KnowledgeBase, data_source: DataSource\n    ) -> None:\n        # Flag the data source to be deleted.\n        data_source.deleted_at = datetime.now(UTC)\n        session.add(data_source)\n\n        # Remove the data source from the knowledge base.\n        stmt = delete(KnowledgeBaseDataSource).where(\n            KnowledgeBaseDataSource.knowledge_base_id == kb.id,\n            KnowledgeBaseDataSource.data_source_id == data_source.id,\n        )\n        session.exec(stmt)\n\n    def list_linked_chat_engines(\n        self, session: Session, kb_id: int\n    ) -> List[ChatEngine]:\n        return session.exec(\n            select(ChatEngine).where(\n                ChatEngine.deleted_at == None,\n                func.JSON_UNQUOTE(\n                    func.JSON_EXTRACT(\n                        ChatEngine.engine_options,\n                        \"$.knowledge_base.linked_knowledge_base.id\",\n                    )\n                )\n                == kb_id,\n            )\n        ).all()\n\n\nknowledge_base_repo = KnowledgeBaseRepo()\n"
  },
  {
    "path": "backend/app/repositories/llm.py",
    "content": "from typing import Type, Optional\n\nfrom fastapi import Depends\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlalchemy import update\nfrom sqlalchemy.orm.attributes import flag_modified\nfrom sqlmodel import select, Session\n\nfrom app.exceptions import DefaultLLMNotFound, LLMNotFound\nfrom app.models import LLM, LLMUpdate\nfrom app.models.chat_engine import ChatEngine\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass LLMRepo(BaseRepo):\n    model_cls: LLM\n\n    def paginate(self, session: Session, params: Params = Depends()) -> Page[LLM]:\n        query = select(LLM)\n        # Make sure the default llm is always on top.\n        query = query.order_by(LLM.is_default.desc(), LLM.created_at.desc())\n        return paginate(session, query, params)\n\n    def get(self, session: Session, llm_id: int) -> Optional[LLM]:\n        return session.get(LLM, llm_id)\n\n    def must_get(self, session: Session, llm_id: int) -> LLM:\n        db_llm = self.get(session, llm_id)\n        if db_llm is None:\n            raise LLMNotFound(llm_id)\n        return db_llm\n\n    def exists_any_model(self, session: Session) -> bool:\n        stmt = select(LLM).with_for_update().limit(1)\n        return session.exec(stmt).one_or_none() is not None\n\n    def create(self, session: Session, llm: LLM) -> LLM:\n        # If there is no exiting model, the first model is\n        # automatically set as the default model.\n        if not self.exists_any_model(session):\n            llm.is_default = True\n\n        if llm.is_default:\n            self._unset_default(session)\n\n        llm.id = None\n        session.add(llm)\n        session.commit()\n        session.refresh(llm)\n\n        return llm\n\n    def update(self, session: Session, llm: LLM, llm_update: LLMUpdate) -> LLM:\n        for field, value in 
llm_update.model_dump(exclude_unset=True).items():\n            setattr(llm, field, value)\n            flag_modified(llm, field)\n\n        session.commit()\n        session.refresh(llm)\n        return llm\n\n    # Default model\n\n    def get_default(self, session: Session) -> Type[LLM] | None:\n        stmt = (\n            select(LLM)\n            .where(LLM.is_default == True)\n            .order_by(LLM.updated_at.desc())\n            .limit(1)\n        )\n        return session.exec(stmt).first()\n\n    def has_default(self, session: Session) -> bool:\n        return self.get_default(session) is not None\n\n    def must_get_default(self, session: Session) -> Type[LLM]:\n        db_llm = self.get_default(session)\n        if db_llm is None:\n            raise DefaultLLMNotFound()\n        return db_llm\n\n    def _unset_default(self, session: Session):\n        session.exec(update(LLM).values(is_default=False))\n\n    def set_default(self, session: Session, llm: LLM) -> LLM:\n        self._unset_default(session)\n        llm.is_default = True\n        session.add(llm)\n        session.commit()\n        session.refresh(llm)\n        return llm\n\n    def delete(self, session: Session, llm: LLM):\n        # TODO: Support to specify a new LLM to replace the current LLM.\n        session.exec(\n            update(ChatEngine).where(ChatEngine.llm_id == llm.id).values(llm_id=None)\n        )\n        session.exec(\n            update(ChatEngine)\n            .where(ChatEngine.fast_llm_id == llm.id)\n            .values(fast_llm_id=None)\n        )\n        session.exec(\n            update(KnowledgeBase)\n            .where(KnowledgeBase.llm_id == llm.id)\n            .values(llm_id=None)\n        )\n\n        session.delete(llm)\n        session.commit()\n\n\nllm_repo = LLMRepo()\n"
  },
  {
    "path": "backend/app/repositories/reranker_model.py",
    "content": "from typing import Optional\n\nfrom fastapi_pagination import Params, Page\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlalchemy import update\nfrom sqlalchemy.orm.attributes import flag_modified\nfrom sqlmodel import Session, select\n\nfrom app.exceptions import RerankerModelNotFound, DefaultRerankerModelNotFound\nfrom app.models import RerankerModel\nfrom app.models.chat_engine import ChatEngine\nfrom app.models.reranker_model import RerankerModelUpdate\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass RerankerModelRepo(BaseRepo):\n    model_cls: RerankerModel\n\n    def paginate(\n        self, session: Session, params: Params | None = Params()\n    ) -> Page[RerankerModel]:\n        query = select(RerankerModel)\n        # Make sure the default reranker model is always on top.\n        query = query.order_by(\n            RerankerModel.is_default.desc(), RerankerModel.created_at.desc()\n        )\n        return paginate(session, query, params)\n\n    def get(self, session: Session, model_id: int) -> Optional[RerankerModel]:\n        return session.get(RerankerModel, model_id)\n\n    def must_get(self, session: Session, model_id: int) -> RerankerModel:\n        db_model = self.get(session, model_id)\n        if db_model is None:\n            raise RerankerModelNotFound(model_id)\n        return db_model\n\n    def exists_any_model(self, session: Session) -> bool:\n        stmt = select(RerankerModel).with_for_update().limit(1)\n        return session.exec(stmt).one_or_none() is not None\n\n    def create(self, session: Session, reranker_model: RerankerModel) -> RerankerModel:\n        # If there is no exiting model, the first model will be\n        # set as the default model.\n        if not self.exists_any_model(session):\n            reranker_model.is_default = True\n\n        if reranker_model.is_default:\n            self.unset_default(session)\n\n        reranker_model.id = None\n        
session.add(reranker_model)\n        session.commit()\n        session.refresh(reranker_model)\n\n        return reranker_model\n\n    def update(\n        self,\n        session: Session,\n        reranker_model: RerankerModel,\n        model_update: RerankerModelUpdate,\n    ) -> RerankerModel:\n        for field, value in model_update.model_dump(exclude_unset=True).items():\n            setattr(reranker_model, field, value)\n            flag_modified(reranker_model, field)\n\n        session.commit()\n        session.refresh(reranker_model)\n        return reranker_model\n\n    def delete(self, db_session: Session, reranker_model: RerankerModel):\n        # TODO: Support to specify a new reranker model to replace the current reranker model.\n        db_session.exec(\n            update(ChatEngine)\n            .where(ChatEngine.reranker_id == reranker_model.id)\n            .values(reranker_id=None)\n        )\n\n        db_session.delete(reranker_model)\n        db_session.commit()\n\n    # Default model\n\n    def get_default(self, session: Session) -> Optional[RerankerModel]:\n        stmt = select(RerankerModel).where(RerankerModel.is_default == True).limit(1)\n        return session.exec(stmt).first()\n\n    def has_default(self, session: Session) -> bool:\n        return self.get_default(session) is not None\n\n    def must_get_default(self, session: Session) -> RerankerModel:\n        db_reranker_model = self.get_default(session)\n        if db_reranker_model is None:\n            raise DefaultRerankerModelNotFound()\n        return db_reranker_model\n\n    def unset_default(self, session: Session):\n        session.exec(update(RerankerModel).values(is_default=False))\n\n    def set_default(self, session: Session, model: RerankerModel):\n        self.unset_default(session)\n        model.is_default = True\n        flag_modified(model, \"is_default\")\n        session.commit()\n        session.refresh(model)\n        return model\n\n\nreranker_model_repo = 
RerankerModelRepo()\n"
  },
  {
    "path": "backend/app/repositories/staff_action_log.py",
    "content": "from sqlmodel import Session\n\nfrom app.models.staff_action_log import StaffActionLog\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass StaffActionRepo(BaseRepo):\n    model_cls = StaffActionLog\n\n    def create_staff_action_log(\n        self, session: Session, action, target_type, before, after, commit=True\n    ) -> StaffActionLog:\n        staff_action_log = StaffActionLog(\n            action=action, target_type=target_type, before=before, after=after\n        )\n        session.add(staff_action_log)\n        if commit:\n            session.commit()\n        return staff_action_log\n\n\nstaff_action_repo = StaffActionRepo()\n"
  },
  {
    "path": "backend/app/repositories/user.py",
    "content": "from typing import Optional\nfrom fastapi_pagination import Page, Params\nfrom fastapi_pagination.ext.sqlmodel import paginate\nfrom sqlmodel import Session, select\nfrom app.models.auth import User\nfrom app.repositories.base_repo import BaseRepo\n\n\nclass UserRepo(BaseRepo):\n    model_cls: User\n\n    def search_users(\n        self,\n        db_session: Session,\n        search: Optional[str] = None,\n        params: Params = Params(),\n    ) -> Page[User]:\n        query = select(User)\n\n        if search:\n            query = query.where(User.email.ilike(f\"%{search}%\"))\n\n        query = query.order_by(User.id)\n        return paginate(\n            db_session,\n            query,\n            params,\n        )\n\n\nuser_repo = UserRepo()\n"
  },
  {
    "path": "backend/app/site_settings/__init__.py",
    "content": "import time\nimport threading\nimport logging\nfrom sqlmodel import Session, select\n\nfrom app.models import SiteSetting as DBSiteSetting\nfrom app.core.db import engine\nfrom app.site_settings.default import default_settings\nfrom app.site_settings.types import SettingValue, SettingType\n\nlogger = logging.getLogger(__name__)\n\n\ndef get_settings_from_db(session: Session):\n    logger.debug(\"Getting settings from the database.\")\n    stmt = select(DBSiteSetting.name, DBSiteSetting.value)\n    results = session.exec(stmt)\n    return {name: value for name, value in results}\n\n\ndef get_db_last_updated_at(session: Session):\n    logger.debug(\"Getting the last updated_at timestamp from the database.\")\n    stmt = (\n        select(DBSiteSetting.updated_at)\n        .order_by(DBSiteSetting.updated_at.desc())\n        .limit(1)\n    )\n    result = session.exec(stmt).first()\n    return result.timestamp() if result else 0\n\n\ntype_mapping = {\n    \"str\": str,\n    \"int\": int,\n    \"float\": float,\n    \"bool\": bool,\n    \"dict\": dict,\n    \"list\": list,\n}\n\n\nclass SiteSettingProxy:\n    __db_cache: dict = {}\n    __last_updated_at_ts: float = 0\n    __last_checked_at_ts: float = 0\n    __mutex = threading.Lock()\n\n    def update_db_cache(self, force_check=False):\n        # Check if we need to update the cache every 6 seconds,\n        # so it means settings will not be updated in real-time\n        # which is acceptable for this project.\n        # If we need real-time updates in the future, we can use\n        # a message queue or a pub/sub system to notify the app.\n        now = time.time()\n        if force_check or (now - self.__last_checked_at_ts > 6):\n            self.__last_checked_at_ts = now\n            with Session(engine) as session:\n                last_updated_at_ts = get_db_last_updated_at(session)\n\n                if last_updated_at_ts > self.__last_updated_at_ts:\n                    with self.__mutex:\n     
                   if last_updated_at_ts > self.__last_updated_at_ts:\n                            self.__db_cache = get_settings_from_db(session)\n                            self.__last_updated_at_ts = last_updated_at_ts\n\n    def get_db_cache(self) -> dict:\n        # Should we use a lock here?\n        with self.__mutex:\n            return self.__db_cache\n\n    def __getattr__(self, name: str) -> SettingType:\n        return self.get_setting(name)\n\n    def get_setting(self, name: str) -> SettingType:\n        if hasattr(default_settings, name):\n            default_setting = getattr(default_settings, name)\n            self.update_db_cache()\n            db_value = self.__db_cache.get(name)\n            return db_value if db_value is not None else default_setting.default\n        else:\n            raise AttributeError(f\"Setting {name} does not exist.\")\n\n    def get_all_settings(\n        self, force_check_db_cache: bool = False\n    ) -> dict[str, SettingValue]:\n        self.update_db_cache(force_check_db_cache)\n\n        result = {}\n        for _, settings in default_settings.setting_groups.items():\n            for default_setting in settings:\n                db_value = self.__db_cache.get(default_setting.name)\n                result[default_setting.name] = SettingValue(\n                    name=default_setting.name,\n                    default=default_setting.default,\n                    value=db_value if db_value is not None else default_setting.default,\n                    data_type=default_setting.data_type,\n                    description=default_setting.description,\n                    group=default_setting.group,\n                    client=default_setting.client,\n                )\n        return result\n\n    def get_client_settings(self) -> dict:\n        # Retrieve all client settings utilized in the frontend.\n        # These settings determine the behavior of the frontend and are accessible to all users.\n        settings = 
self.get_all_settings()\n        return {k: s.value for k, s in settings.items() if s.client}\n\n    def setting_exists(self, name: str) -> bool:\n        return hasattr(default_settings, name)\n\n    def update_setting(self, session: Session, name: str, value: SettingType):\n        if not self.setting_exists(name):\n            raise AttributeError(f\"Setting {name} does not exist.\")\n\n        _default_setting: SettingValue = getattr(default_settings, name)\n        if not isinstance(value, type_mapping[_default_setting.data_type]):\n            raise ValueError(f\"{name} must be of type `{_default_setting.data_type}`.\")\n\n        db_setting_obj = session.exec(\n            select(DBSiteSetting).filter(DBSiteSetting.name == name)\n        ).first()\n        if db_setting_obj:\n            db_setting_obj.value = value\n        else:\n            db_setting_obj = DBSiteSetting(\n                name=name, value=value, data_type=_default_setting.data_type\n            )\n            session.add(db_setting_obj)\n        session.commit()\n\n        self.update_db_cache(force_check=True)\n\n\nSiteSetting = SiteSettingProxy()\n\n\n__all__ = [\"SiteSetting\", \"SettingValue\", \"SettingType\"]\n"
  },
  {
    "path": "backend/app/site_settings/default.py",
    "content": "import yaml\nimport threading\nfrom collections import defaultdict\n\nfrom app.site_settings.types import SettingValue\n\n\nDEFAULT_YAML_FILE = \"./app/site_settings/default_settings.yml\"\n\n\nclass DefaultSettings:\n    setting_groups: dict[str, list[SettingValue]] = defaultdict(list)\n\n    __mutex = threading.Lock()\n    __loaded = False\n\n    def __init__(self):\n        with self.__mutex:\n            if not self.__loaded:\n                self.load_default_from_yaml()\n                self.__loaded = True\n\n    def load_default_from_yaml(self):\n        with open(DEFAULT_YAML_FILE, \"r\") as f:\n            data = yaml.safe_load(f)\n            for group, settings in data.items():\n                for name, value in settings.items():\n                    setattr(\n                        self,\n                        name,\n                        SettingValue(**value, name=name, group=group),\n                    )\n                    self.setting_groups[group].append(getattr(self, name))\n\n\ndefault_settings = DefaultSettings()\n"
  },
  {
    "path": "backend/app/site_settings/default_settings.yml",
    "content": "website:\n  title:\n    default: \"TiDB.AI\"\n    data_type: str\n    description: \"The title of the website.\"\n    client: true\n  description:\n    default: \"Knowledge Graph based RAG that built with TiDB Serverless Vector Storage and LlamaIndex\"\n    data_type: str\n    description: \"The description of the website.\"\n    client: true\n  homepage_title:\n    default: \"Ask anything about TiDB\"\n    data_type: str\n    description: \"The title of the homepage.\"\n    client: true\n  homepage_example_questions:\n    default:\n      - \"What is TiDB?\"\n      - \"Does TiDB support FOREIGN KEY?\"\n      - \"Does TiDB support serverless?\"\n    data_type: list\n    description: \"The example questions on the homepage.\"\n    client: true\n  homepage_footer_links:\n    default:\n      - text: \"Release Notes\"\n        href: \"https://autoflow.tidb.ai/releases\"\n      - text: \"Powered by TiDB\"\n        href: \"https://tidb.cloud/ai\"\n      - text: \"© 2024 PingCAP\"\n        href: \"https://pingcap.com\"\n    data_type: list\n    description: \"The footer links on the homepage.\"\n    client: true\n  logo_in_dark_mode:\n    default: \"https://tidb.ai/tidb-ai-light.svg\"\n    data_type: str\n    description: \"The logo in dark mode.\"\n    client: true\n  logo_in_light_mode:\n    default: \"https://tidb.ai/tidb-ai.svg\"\n    data_type: str\n    description: \"The logo in light mode.\"\n    client: true\n  social_github:\n    default: \"https://github.com/pingcap/tidb.ai\"\n    data_type: str\n    description: \"The link to the github\"\n    client: true\n  social_twitter:\n    default: \"https://twitter.com/PingCAP\"\n    data_type: str\n    description: \"The link to the twitter\"\n    client: true\n  social_discord:\n    default: \"https://discord.gg/XzSW23Jg9p\"\n    data_type: str\n    description: \"The link to the discord\"\n    client: true\n  ga_id:\n    default: \"\"\n    data_type: str\n    description: \"Google Analytics ID\"\n    
client: true\n\ncustom_js:\n  custom_js_example_questions:\n    default:\n      - \"What is TiDB?\"\n      - \"Does TiDB support FOREIGN KEY?\"\n      - \"Does TiDB support serverless?\"\n    data_type: list\n    description: \"The example questions on the custom_js.\"\n    client: true\n  custom_js_button_label:\n    default: \"Ask AI\"\n    data_type: str\n    description: \"The button label on the custom_js.\"\n    client: true\n  custom_js_button_img_src:\n    default: \"https://tidb.ai/tidb-ai-widget.svg\"\n    data_type: str\n    description: \"The button image on the custom_js.\"\n    client: true\n  custom_js_logo_src:\n    default: \"https://tidb.ai/tidb-ai-widget.svg\"\n    data_type: str\n    description: \"The logo on the custom_js.\"\n    client: true\n\nchat:\n  langfuse_secret_key:\n    default: \"\"\n    data_type: str\n    description: \"The secret key of Langfuse.\"\n    client: false\n  langfuse_public_key:\n    default: \"\"\n    data_type: str\n    description: \"The public key of Langfuse.\"\n    client: false\n  langfuse_host:\n    default: \"https://us.cloud.langfuse.com\"\n    data_type: str\n    description: \"The host of Langfuse.\"\n    client: false\n  enable_post_verifications:\n    default: true\n    data_type: bool\n    description: \"Enable post verification for all chats.\"\n    client: true\n  enable_post_verifications_for_widgets:\n    default: false\n    data_type: bool\n    description: \"Enable post verification for chats from js widgets.\"\n    client: true\n\nupload:\n  max_upload_file_size:\n    default: 10485760    # 10 MiB\n    data_type: int\n    description: \"Max body size (in bytes) of upload file.\"\n    client: true"
  },
  {
    "path": "backend/app/site_settings/types.py",
    "content": "from typing import TypeAlias\nfrom dataclasses import dataclass\n\n\nSettingType: TypeAlias = bool | str | int | float | list | dict | None\n\n\n@dataclass\nclass SettingValue:\n    name: str\n    default: SettingType\n    data_type: str\n    description: str\n    group: str\n    # Whether the setting can be viewed by the client\n    client: bool = False\n    value: SettingType = None\n"
  },
  {
    "path": "backend/app/staff_action/__init__.py",
    "content": "from sqlmodel import Session\n\nfrom app.models import StaffActionLog\n\n\ndef create_staff_action_log(\n    session: Session, action, target_type, target_id, before, after, commit=True\n):\n    staff_action_log = StaffActionLog(\n        action=action,\n        target_type=target_type,\n        target_id=target_id,\n        before=before,\n        after=after,\n    )\n    session.add(staff_action_log)\n    if commit:\n        session.commit()\n    return staff_action_log\n"
  },
  {
    "path": "backend/app/tasks/__init__.py",
    "content": "from .knowledge_base import (\n    import_documents_for_knowledge_base,\n    purge_kb_datasource_related_resources,\n)\nfrom .build_index import (\n    build_index_for_document,\n    build_kg_index_for_chunk,\n)\n\nfrom .evaluate import add_evaluation_task\n\n\n__all__ = [\n    \"build_index_for_document\",\n    \"build_kg_index_for_chunk\",\n    \"import_documents_for_knowledge_base\",\n    \"purge_kb_datasource_related_resources\",\n    \"add_evaluation_task\",\n]\n"
  },
  {
    "path": "backend/app/tasks/build_index.py",
    "content": "import traceback\nfrom uuid import UUID\nfrom sqlmodel import Session\nfrom celery.utils.log import get_task_logger\n\nfrom app.celery import app as celery_app\nfrom app.core.db import engine\nfrom app.models import (\n    Document as DBDocument,\n    DocIndexTaskStatus,\n    KgIndexStatus,\n)\nfrom app.models.chunk import get_kb_chunk_model\nfrom app.models.knowledge_base import IndexMethod\nfrom app.rag.build_index import IndexService\nfrom app.rag.knowledge_base.config import get_kb_llm, get_kb_embed_model\nfrom app.repositories import knowledge_base_repo\nfrom app.repositories.chunk import ChunkRepo\n\nlogger = get_task_logger(__name__)\n\n\n# TODO: refactor: divide into two tasks: build_vector_index_for_document and build_kg_index_for_document\n\n\n@celery_app.task(bind=True)\ndef build_index_for_document(self, knowledge_base_id: int, document_id: int):\n    # Pre-check before building index.\n    with Session(engine, expire_on_commit=False) as session:\n        kb = knowledge_base_repo.must_get(session, knowledge_base_id)\n\n        # Check document.\n        db_document = session.get(DBDocument, document_id)\n        if db_document is None:\n            logger.error(f\"Document #{document_id} is not found\")\n            return\n\n        if db_document.index_status not in (\n            DocIndexTaskStatus.PENDING,\n            DocIndexTaskStatus.NOT_STARTED,\n        ):\n            logger.info(f\"Document #{document_id} is not in pending state\")\n            return\n\n        # Init knowledge base index service。\n        try:\n            llm = get_kb_llm(session, kb)\n            embed_model = get_kb_embed_model(session, kb)\n            index_service = IndexService(llm, embed_model, kb)\n        except ValueError as e:\n            # LLM may not be available yet(eg. 
bootstrapping), retry after specified time\n            logger.warning(\n                f\"Failed to init index service for document #{document_id} (retry task after 1 minute): {e}\"\n            )\n            raise self.retry(countdown=60)\n\n        db_document.index_status = DocIndexTaskStatus.RUNNING\n        session.add(db_document)\n        session.commit()\n\n    # Build vector index.\n    try:\n        with Session(engine) as index_session:\n            index_service.build_vector_index_for_document(index_session, db_document)\n\n        with Session(engine) as session:\n            db_document.index_status = DocIndexTaskStatus.COMPLETED\n            session.add(db_document)\n            session.commit()\n            logger.info(f\"Built vector index for document #{document_id} successfully.\")\n    except Exception:\n        with Session(engine) as session:\n            error_msg = traceback.format_exc()\n            logger.error(\n                f\"Failed to build vector index for document {document_id}: {error_msg}\"\n            )\n            db_document.index_status = DocIndexTaskStatus.FAILED\n            db_document.index_result = error_msg\n            session.add(db_document)\n            session.commit()\n        return\n\n    # Build knowledge graph index.\n    with Session(engine, expire_on_commit=False) as session:\n        kb = knowledge_base_repo.must_get(session, knowledge_base_id)\n        if IndexMethod.KNOWLEDGE_GRAPH not in kb.index_methods:\n            return\n\n        chunk_repo = ChunkRepo(get_kb_chunk_model(kb))\n        chunks = chunk_repo.get_document_chunks(session, document_id)\n        for chunk in chunks:\n            build_kg_index_for_chunk.delay(knowledge_base_id, chunk.id)\n\n\n@celery_app.task\ndef build_kg_index_for_chunk(knowledge_base_id: int, chunk_id: UUID):\n    with Session(engine, expire_on_commit=False) as session:\n        kb = knowledge_base_repo.must_get(session, knowledge_base_id)\n\n        # Check 
chunk.\n        chunk_model = get_kb_chunk_model(kb)\n        db_chunk = session.get(chunk_model, chunk_id)\n        if db_chunk is None:\n            logger.error(f\"Chunk #{chunk_id} is not found\")\n            return\n\n        if db_chunk.index_status not in (\n            KgIndexStatus.PENDING,\n            KgIndexStatus.NOT_STARTED,\n        ):\n            logger.info(f\"Chunk #{chunk_id} is not in pending state\")\n            return\n\n        # Init knowledge base index service。\n        llm = get_kb_llm(session, kb)\n        embed_model = get_kb_embed_model(session, kb)\n        index_service = IndexService(llm, embed_model, kb)\n\n        db_chunk.index_status = KgIndexStatus.RUNNING\n        session.add(db_chunk)\n        session.commit()\n\n    try:\n        with Session(engine) as index_session:\n            index_service.build_kg_index_for_chunk(index_session, db_chunk)\n\n        with Session(engine) as session:\n            db_chunk.index_status = KgIndexStatus.COMPLETED\n            session.add(db_chunk)\n            session.commit()\n            logger.info(\n                f\"Built knowledge graph index for chunk #{chunk_id} successfully.\"\n            )\n    except Exception:\n        with Session(engine) as session:\n            error_msg = traceback.format_exc()\n            logger.error(\n                f\"Failed to build knowledge graph index for chunk #{chunk_id}\",\n                exc_info=True,\n            )\n            db_chunk.index_status = KgIndexStatus.FAILED\n            db_chunk.index_result = error_msg\n            session.add(db_chunk)\n            session.commit()\n"
  },
  {
    "path": "backend/app/tasks/evaluate.py",
    "content": "import logging\nimport traceback\n\nfrom llama_index.core.base.llms.types import ChatMessage\n\nfrom app.celery import app as celery_app\nfrom llama_index.llms.openai import OpenAI\nfrom llama_index.embeddings.openai import OpenAIEmbedding\nfrom ragas import EvaluationDataset, evaluate\nfrom ragas.embeddings import LlamaIndexEmbeddingsWrapper\nfrom ragas.llms import LlamaIndexLLMWrapper\nfrom ragas.metrics import FactualCorrectness, SemanticSimilarity\nfrom sqlmodel import Session, select\nfrom celery.utils.log import get_task_logger\nfrom tenacity import retry, stop_after_attempt, wait_fixed\n\nfrom app.core.config import settings, Environment\nfrom app.core.db import engine\nfrom app.models import (\n    EvaluationTask,\n    EvaluationStatus,\n    EvaluationTaskItem,\n)\nfrom dotenv import load_dotenv\n\nfrom app.rag.chat.chat_flow import ChatFlow\nfrom app.rag.chat.stream_protocol import ChatEvent\nfrom app.rag.types import ChatEventType, ChatMessageSate\n\nload_dotenv()\n\nlogger = get_task_logger(__name__)\n\nif settings.ENVIRONMENT == Environment.LOCAL:\n    logger.setLevel(logging.DEBUG)\n\n    for handler in logger.handlers:\n        handler.setLevel(logging.DEBUG)\n\n\n@celery_app.task\ndef add_evaluation_task(evaluation_task_id: int):\n    logger.info(\n        f\"[add_evaluation_task] Enter with evaluation task #{evaluation_task_id}\"\n    )\n\n    with Session(engine, expire_on_commit=False) as session:\n        evaluation_task = session.get(EvaluationTask, evaluation_task_id)\n        if evaluation_task is None:\n            logger.error(f\"Evaluation task #{evaluation_task_id} is not found\")\n            return\n\n        # get eval items\n        eval_item_stmt = select(EvaluationTaskItem).where(\n            EvaluationTaskItem.evaluation_task_id == evaluation_task_id\n        )\n        eval_item_list = session.exec(eval_item_stmt).all()\n        logger.info(f\"[add_evaluation_task] get {len(eval_item_list)} evaluation items\")\n    
    for eval_item in eval_item_list:\n            logger.debug(type(eval_item))\n            logger.debug(\n                f\"[add_evaluation_task] deal with evaluation item #{eval_item.id}\"\n            )\n            add_evaluation_task_item.delay(eval_item.id)\n\n\n@celery_app.task\ndef add_evaluation_task_item(evaluation_task_item_id: int):\n    logger.info(\n        f\"Enter add_evaluation_task_item with evaluation item #{evaluation_task_item_id}\"\n    )\n\n    with Session(engine, expire_on_commit=False) as session:\n        evaluation_task_item = session.get(EvaluationTaskItem, evaluation_task_item_id)\n        if evaluation_task_item is None:\n            logger.error(f\"Evaluation item #{evaluation_task_item_id} is not found\")\n            return\n        if evaluation_task_item.status != EvaluationStatus.NOT_START:\n            logger.error(\n                f\"Evaluation item #{evaluation_task_item_id} is not in not start state\"\n            )\n            return\n    try:\n        if evaluation_task_item.response is None or evaluation_task_item.response == \"\":\n            response, _ = generate_answer_by_autoflow(\n                [ChatMessage(role=\"assistant\", content=evaluation_task_item.query)],\n                evaluation_task_item.chat_engine,\n            )\n            if response is None or response == \"\":\n                raise Exception(\"Autoflow response is empty\")\n\n            logger.info(\n                f\"Got response from autoflow for evaluation item #{evaluation_task_item_id}, {response}\"\n            )\n            evaluation_task_item.response = response\n            logger.info(f\"Successfully get response item #{evaluation_task_item_id}\")\n\n            with Session(engine, expire_on_commit=False) as session:\n                session.merge(evaluation_task_item)\n                session.commit()\n\n        evaluate_task(evaluation_task_item)\n\n    except Exception as e:\n        logger.error(f\"Failed to evaluate 
item #{evaluation_task_item_id}, error: {e}\")\n        evaluation_task_item.error_msg = traceback.format_exc()\n        evaluation_task_item.status = EvaluationStatus.ERROR\n\n        with Session(engine, expire_on_commit=False) as session:\n            session.merge(evaluation_task_item)\n            session.commit()\n\n\ndef evaluate_task(evaluation_task_item: EvaluationTaskItem):\n    logger.info(f\"Enter evaluate_task with evaluation item #{evaluation_task_item.id}\")\n    ragas_list = [\n        {\n            \"user_input\": evaluation_task_item.query,\n            \"reference\": evaluation_task_item.reference,\n            \"response\": evaluation_task_item.response,\n        }\n    ]\n    logger.debug(f\"Response data {evaluation_task_item.response}\")\n\n    ragas_dataset = EvaluationDataset.from_list(ragas_list)\n    logger.debug(f\"Dataset {ragas_dataset.to_pandas().head()}\")\n\n    evaluator_llm = LlamaIndexLLMWrapper(\n        OpenAI(model=\"gpt-4o\", api_key=settings.EVALUATION_OPENAI_API_KEY)\n    )\n    evaluator_embeddings = LlamaIndexEmbeddingsWrapper(\n        OpenAIEmbedding(\n            model=\"text-embedding-3-large\", api_key=settings.EVALUATION_OPENAI_API_KEY\n        )\n    )\n\n    metrics = [\n        # LLMContextRecall(llm=evaluator_llm),  # retrieved_contexts required\n        FactualCorrectness(llm=evaluator_llm),\n        # Faithfulness(llm=evaluator_llm),  # retrieved_contexts required\n        SemanticSimilarity(embeddings=evaluator_embeddings),\n    ]\n\n    try:\n        eval_result = evaluate(\n            dataset=ragas_dataset,\n            metrics=metrics,\n            raise_exceptions=True,\n            show_progress=False,\n        )\n\n        logger.debug(\"eval_result to_pandas\")\n        result_list = eval_result.to_pandas().to_dict(orient=\"records\")\n        logger.debug(f\"result list {result_list}\")\n        if len(result_list) != 1:\n            raise Exception(\n                f\"Item 
{evaluation_task_item.id} cannot get evaluation from ragas\"\n            )\n\n        logger.debug(f\"result {result_list[0]}\")\n        evaluation_task_item.factual_correctness = result_list[0][\n            FactualCorrectness.name\n        ]\n        evaluation_task_item.semantic_similarity = result_list[0][\n            SemanticSimilarity.name\n        ]\n        evaluation_task_item.status = EvaluationStatus.DONE\n\n        logger.info(f\"Result evaluation item #{evaluation_task_item}\")\n        with Session(engine, expire_on_commit=False) as session:\n            session.merge(evaluation_task_item)\n            session.commit()\n    except Exception as e:\n        logger.error(f\"Failed to evaluate item #{evaluation_task_item.id}, error: {e}\")\n        evaluation_task_item.error_msg = traceback.format_exc()\n        evaluation_task_item.status = EvaluationStatus.ERROR\n\n        with Session(engine, expire_on_commit=False) as session:\n            session.merge(evaluation_task_item)\n            session.commit()\n\n\n@retry(stop=stop_after_attempt(2), wait=wait_fixed(5))\ndef generate_answer_by_autoflow(\n    messages: list[ChatMessage], chat_engine: str\n) -> (str, list):\n    with Session(engine, expire_on_commit=False) as session:\n        chat_svc = ChatFlow(\n            db_session=session,\n            user=None,\n            browser_id=\"\",\n            origin=\"evaluation\",\n            chat_messages=messages,\n            engine_name=chat_engine,\n        )\n\n        sources, answer = [], \"\"\n        for m in chat_svc.chat():\n            if not isinstance(m, ChatEvent):\n                continue\n            if m.event_type == ChatEventType.MESSAGE_ANNOTATIONS_PART:\n                if m.payload.state == ChatMessageSate.SOURCE_NODES:\n                    sources = m.payload.context\n            elif m.event_type == ChatEventType.TEXT_PART:\n                answer += m.payload\n            elif m.event_type == ChatEventType.ERROR_PART:\n      
          raise Exception(m.payload)\n            else:\n                pass\n\n    return answer, sources\n\n\ndef parse_langfuse_trace_id_from_url(trace_url: str) -> str:\n    # Example trace_url: https://us.cloud.langfuse.com/trace/87e7eb2e-b789-4b23-af60-fbcf0fd517a1\n    return trace_url.split(\"/\")[-1]\n"
  },
  {
    "path": "backend/app/tasks/knowledge_base.py",
    "content": "from celery.utils.log import get_task_logger\nfrom sqlalchemy import delete\nfrom sqlmodel import Session\n\nfrom app.celery import app as celery_app\nfrom app.core.db import engine\nfrom app.exceptions import KBNotFound\nfrom app.models import (\n    Document,\n    KnowledgeBaseDataSource,\n    DataSource,\n)\nfrom app.rag.datasource import get_data_source_loader\nfrom app.repositories import knowledge_base_repo, document_repo\nfrom .build_index import build_index_for_document\nfrom ..models.chunk import get_kb_chunk_model\nfrom ..models.entity import get_kb_entity_model\nfrom ..models.relationship import get_kb_relationship_model\nfrom ..rag.knowledge_base.index_store import (\n    get_kb_tidb_vector_store,\n    get_kb_tidb_graph_store,\n)\nfrom ..repositories.chunk import ChunkRepo\nfrom ..repositories.graph import GraphRepo\n\nlogger = get_task_logger(__name__)\n\n\n@celery_app.task\ndef import_documents_for_knowledge_base(kb_id: int):\n    try:\n        with Session(engine) as session:\n            kb = knowledge_base_repo.must_get(session, kb_id)\n            data_sources = kb.data_sources\n            for data_source in data_sources:\n                import_documents_from_kb_datasource(kb.id, data_source.id)\n\n        logger.info(f\"Successfully imported documents for knowledge base #{kb_id}\")\n    except KBNotFound:\n        logger.error(f\"Knowledge base #{kb_id} is not found\")\n    except Exception as e:\n        logger.exception(\n            f\"Failed to import documents for knowledge base #{kb_id}\", exc_info=e\n        )\n\n\n@celery_app.task\ndef import_documents_from_kb_datasource(kb_id: int, data_source_id: int):\n    try:\n        with Session(engine) as session:\n            kb = knowledge_base_repo.must_get(session, kb_id)\n            data_source = knowledge_base_repo.must_get_kb_datasource(\n                session, kb, data_source_id\n            )\n\n            logger.info(\n                f\"Loading documents from data 
source #{data_source_id} for knowledge base #{kb_id}\"\n            )\n            loader = get_data_source_loader(\n                session,\n                kb_id,\n                data_source.data_source_type,\n                data_source.id,\n                data_source.user_id,\n                data_source.config,\n            )\n\n            for document in loader.load_documents():\n                session.add(document)\n                session.commit()\n\n                build_index_for_document.delay(kb_id, document.id)\n\n        stats_for_knowledge_base.delay(kb_id)\n        logger.info(\n            f\"Successfully imported documents for from datasource #{data_source_id}\"\n        )\n    except Exception as e:\n        logger.exception(\n            f\"Failed to import documents from data source #{data_source_id} of knowledge base #{kb_id}\",\n            exc_info=e,\n        )\n\n\n@celery_app.task\ndef stats_for_knowledge_base(kb_id: int):\n    try:\n        with Session(engine) as session:\n            kb = knowledge_base_repo.must_get(session, kb_id)\n\n            documents_total = knowledge_base_repo.count_documents(session, kb)\n            data_sources_total = knowledge_base_repo.count_data_sources(session, kb)\n\n            kb.documents_total = documents_total\n            kb.data_sources_total = data_sources_total\n\n            session.add(kb)\n            session.commit()\n\n        logger.info(f\"Successfully running stats for knowledge base #{kb_id}\")\n    except KBNotFound:\n        logger.error(f\"Knowledge base #{kb_id} is not found\")\n    except Exception as e:\n        logger.exception(f\"Failed to run stats for knowledge base #{kb_id}\", exc_info=e)\n\n\n@celery_app.task\ndef purge_knowledge_base_related_resources(kb_id: int):\n    \"\"\"\n    Purge all resources related to a knowledge base.\n\n    Related resources:\n        - documents\n        - chunks\n        - indexes\n            - vector index\n            - knowledge 
graph index\n        - data sources\n    \"\"\"\n\n    with Session(engine) as session:\n        knowledge_base = knowledge_base_repo.must_get(\n            session, kb_id, show_soft_deleted=True\n        )\n        assert knowledge_base.deleted_at is not None\n\n        data_source_ids = [datasource.id for datasource in knowledge_base.data_sources]\n\n        # Drop entities_{kb_id}, relationships_{kb_id} tables.\n        tidb_graph_store = get_kb_tidb_graph_store(session, knowledge_base)\n        tidb_graph_store.drop_table_schema()\n        logger.info(\n            f\"Dropped tidb graph store of knowledge base #{kb_id} successfully.\"\n        )\n\n        # Drop chunks_{kb_id} table.\n        tidb_vector_store = get_kb_tidb_vector_store(session, knowledge_base)\n        tidb_vector_store.drop_table_schema()\n\n        logger.info(\n            f\"Dropped tidb vector store of knowledge base #{kb_id} successfully.\"\n        )\n\n        # Delete documents.\n        stmt = delete(Document).where(Document.knowledge_base_id == kb_id)\n        session.exec(stmt)\n        logger.info(f\"Deleted documents of knowledge base #{kb_id} successfully.\")\n\n        # Delete data sources and links.\n        if len(data_source_ids) > 0:\n            stmt = delete(KnowledgeBaseDataSource).where(\n                KnowledgeBaseDataSource.knowledge_base_id == kb_id\n            )\n            session.exec(stmt)\n            logger.info(\n                f\"Deleted linked data sources of knowledge base #{kb_id} successfully.\"\n            )\n\n            stmt = delete(DataSource).where(DataSource.id.in_(data_source_ids))\n            session.exec(stmt)\n            logger.info(\n                f\"Deleted data sources {', '.join([f'#{did}' for did in data_source_ids])} successfully.\"\n            )\n\n        # Delete knowledge base.\n        session.delete(knowledge_base)\n        logger.info(f\"Deleted knowledge base #{kb_id} successfully.\")\n\n        
session.commit()\n\n\n@celery_app.task\ndef purge_kb_datasource_related_resources(kb_id: int, datasource_id: int):\n    \"\"\"\n    Purge all resources related to the deleted datasource in the knowledge base.\n    \"\"\"\n\n    with Session(engine) as session:\n        kb = knowledge_base_repo.must_get(session, kb_id, show_soft_deleted=True)\n        datasource = knowledge_base_repo.must_get_kb_datasource(\n            session, kb, datasource_id, show_soft_deleted=True\n        )\n        assert datasource.deleted_at is not None\n\n        chunk_model = get_kb_chunk_model(kb)\n        entity_model = get_kb_entity_model(kb)\n        relationship_model = get_kb_relationship_model(kb)\n\n        chunk_repo = ChunkRepo(chunk_model)\n        graph_repo = GraphRepo(entity_model, relationship_model, chunk_model)\n\n        graph_repo.delete_data_source_relationships(session, datasource_id)\n        logger.info(\n            f\"Deleted relationships generated by chunks from data source #{datasource_id} successfully.\"\n        )\n\n        graph_repo.delete_orphaned_entities(session)\n        logger.info(\"Deleted orphaned entities successfully.\")\n\n        chunk_repo.delete_by_datasource(session, datasource_id)\n        logger.info(f\"Deleted chunks from data source #{datasource_id} successfully.\")\n\n        document_repo.delete_by_datasource(session, datasource_id)\n        logger.info(\n            f\"Deleted documents from data source #{datasource_id} successfully.\"\n        )\n\n        session.delete(datasource)\n        logger.info(f\"Deleted data source #{datasource_id} successfully.\")\n\n        session.commit()\n\n    stats_for_knowledge_base.delay(kb_id)\n"
  },
  {
    "path": "backend/app/types.py",
    "content": "import enum\n\n\nclass MimeTypes(str, enum.Enum):\n    PLAIN_TXT = \"text/plain\"\n    MARKDOWN = \"text/markdown\"\n    PDF = \"application/pdf\"\n    DOCX = \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\"\n    PPTX = \"application/vnd.openxmlformats-officedocument.presentationml.presentation\"\n    XLSX = \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\"\n    CSV = \"text/csv\"\n"
  },
  {
    "path": "backend/app/utils/aes.py",
    "content": "import os\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import padding\n\n\nclass AESCipher:\n    def __init__(self, key: bytes) -> None:\n        self.key = key\n        self.backend = default_backend()\n\n    def encrypt(self, plain_text: str) -> bytes:\n        # generate a random initialization vector\n        iv = os.urandom(16)\n        cipher = Cipher(algorithms.AES(self.key), modes.CFB(iv), backend=self.backend)\n        encryptor = cipher.encryptor()\n\n        # fill the last block with padding\n        padder = padding.PKCS7(algorithms.AES.block_size).padder()\n        padded_data = padder.update(plain_text.encode()) + padder.finalize()\n        encrypted = encryptor.update(padded_data) + encryptor.finalize()\n        return iv + encrypted\n\n    def decrypt(self, encrypted_text: bytes) -> str:\n        # get the initialization vector and the encrypted data\n        iv = encrypted_text[:16]\n        encrypted_data = encrypted_text[16:]\n\n        cipher = Cipher(algorithms.AES(self.key), modes.CFB(iv), backend=self.backend)\n        decryptor = cipher.decryptor()\n\n        # remove the padding\n        decrypted_padded = decryptor.update(encrypted_data) + decryptor.finalize()\n        unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()\n        decrypted = unpadder.update(decrypted_padded) + unpadder.finalize()\n        return decrypted\n"
  },
  {
    "path": "backend/app/utils/namespace.py",
    "content": "from typing import Optional\n\n\ndef format_namespace(namespace: Optional[str] = None) -> str:\n    return namespace.replace(\"-\", \"_\") if namespace else \"\"\n"
  },
  {
    "path": "backend/app/utils/singleflight_cache.py",
    "content": "import threading\nfrom functools import wraps\n\n\ndef singleflight_cache(func):\n    \"\"\"\n    A thread-safe cache decorator implementing the 'singleflight' pattern.\n\n    The singleflight pattern ensures that for any given set of arguments,\n    concurrent calls to the decorated function will only result in a single\n    actual execution. Other threads with the same arguments will wait for\n    the first execution to complete and then receive the same result,\n    rather than triggering duplicate computations.\n\n    This is especially useful for expensive or resource-intensive operations\n    where you want to avoid redundant work and prevent cache stampede.\n\n    Example:\n        @singleflight_cache\n        def load_data(key):\n            # expensive operation\n            ...\n\n        # In multiple threads:\n        load_data('foo')  # Only one thread will actually execute the function for 'foo'\n    \"\"\"\n    _cache = {}\n    _locks = {}\n    _locks_lock = threading.Lock()\n\n    @wraps(func)\n    def wrapper(*args, **kwargs):\n        key = args + tuple(sorted(kwargs.items()))\n        if key in _cache:\n            return _cache[key]\n        with _locks_lock:\n            lock = _locks.setdefault(key, threading.Lock())\n        with lock:\n            if key in _cache:\n                return _cache[key]\n            result = func(*args, **kwargs)\n            _cache[key] = result\n            return result\n\n    return wrapper\n"
  },
  {
    "path": "backend/app/utils/tracing.py",
    "content": "from contextlib import contextmanager\nfrom typing import Optional, Generator\nfrom langfuse.client import StatefulSpanClient, StatefulClient\nfrom langfuse.llama_index import LlamaIndexInstrumentor\nfrom langfuse.llama_index._context import langfuse_instrumentor_context\n\n\nclass LangfuseContextManager:\n    langfuse_client: Optional[StatefulSpanClient] = None\n\n    def __init__(self, instrumentor: LlamaIndexInstrumentor):\n        self.instrumentor = instrumentor\n\n    @contextmanager\n    def observe(self, **kwargs):\n        try:\n            self.instrumentor.start()\n            with self.instrumentor.observe(**kwargs) as trace_client:\n                trace_client.update(name=kwargs.get(\"trace_name\"), **kwargs)\n                self.langfuse_client = trace_client\n                yield trace_client\n        except Exception:\n            raise\n        finally:\n            self.instrumentor.flush()\n            self.instrumentor.stop()\n\n    @contextmanager\n    def span(\n        self, parent_client: Optional[StatefulClient] = None, **kwargs\n    ) -> Generator[\"StatefulSpanClient\", None, None]:\n        if parent_client:\n            client = parent_client\n        else:\n            client = self.langfuse_client\n        span = client.span(**kwargs)\n\n        ctx = langfuse_instrumentor_context.get().copy()\n        old_parent_observation_id = ctx.get(\"parent_observation_id\")\n        langfuse_instrumentor_context.get().update(\n            {\n                \"parent_observation_id\": span.id,\n            }\n        )\n\n        try:\n            yield span\n        except Exception:\n            raise\n        finally:\n            ctx.update(\n                {\n                    \"parent_observation_id\": old_parent_observation_id,\n                }\n            )\n            langfuse_instrumentor_context.get().update(ctx)\n\n    @property\n    def trace_id(self) -> Optional[str]:\n        if self.langfuse_client:\n    
        return self.langfuse_client.trace_id\n        else:\n            return None\n\n    @property\n    def trace_url(self) -> Optional[str]:\n        if self.langfuse_client:\n            return self.langfuse_client.get_trace_url()\n        else:\n            return None\n"
  },
  {
    "path": "backend/app/utils/uuid6.py",
    "content": "r\"\"\"UUID draft version objects (universally unique identifiers).\nThis module provides the functions uuid6() and uuid7() for\ngenerating version 6 and 7 UUIDs as specified in\nhttps://github.com/uuid6/uuid6-ietf-draft.\n\nRepo: https://github.com/oittaa/uuid6-python\n\nCopy from: https://github.com/jonra1993/fastapi-alembic-sqlmodel-async/blob/main/backend/app/app/utils/uuid6.py\n\"\"\"\n\nimport secrets\nimport time\nimport uuid\n\n\nclass UUID(uuid.UUID):\n    r\"\"\"UUID draft version objects\"\"\"\n\n    def __init__(\n        self,\n        hex: str = None,\n        bytes: bytes = None,\n        bytes_le: bytes = None,\n        fields: tuple[int, int, int, int, int, int] = None,\n        int: int = None,\n        version: int = None,\n        *,\n        is_safe=uuid.SafeUUID.unknown,\n    ) -> None:\n        r\"\"\"Create a UUID.\"\"\"\n\n        if int is None or [hex, bytes, bytes_le, fields].count(None) != 4:\n            super().__init__(\n                hex=hex,\n                bytes=bytes,\n                bytes_le=bytes_le,\n                fields=fields,\n                int=int,\n                version=version,\n                is_safe=is_safe,\n            )\n        if not 0 <= int < 1 << 128:\n            raise ValueError(\"int is out of range (need a 128-bit value)\")\n        if version is not None:\n            if not 6 <= version <= 7:\n                raise ValueError(\"illegal version number\")\n            # Set the variant to RFC 4122.\n            int &= ~(0xC000 << 48)\n            int |= 0x8000 << 48\n            # Set the version number.\n            int &= ~(0xF000 << 64)\n            int |= version << 76\n        super().__init__(int=int, is_safe=is_safe)\n\n    @property\n    def subsec(self) -> int:\n        return ((self.int >> 64) & 0x0FFF) << 8 | ((self.int >> 54) & 0xFF)\n\n    @property\n    def time(self) -> int:\n        if self.version == 6:\n            return (\n                (self.time_low << 
28)\n                | (self.time_mid << 12)\n                | (self.time_hi_version & 0x0FFF)\n            )\n        if self.version == 7:\n            return (self.int >> 80) * 10**6 + _subsec_decode(self.subsec)\n        return super().time\n\n\ndef _subsec_decode(value: int) -> int:\n    return -(-value * 10**6 // 2**20)\n\n\ndef _subsec_encode(value: int) -> int:\n    return value * 2**20 // 10**6\n\n\n_last_v6_timestamp = None\n_last_v7_timestamp = None\n\n\ndef uuid6(clock_seq: int = None) -> UUID:\n    r\"\"\"UUID version 6 is a field-compatible version of UUIDv1, reordered for\n    improved DB locality.  It is expected that UUIDv6 will primarily be\n    used in contexts where there are existing v1 UUIDs.  Systems that do\n    not involve legacy UUIDv1 SHOULD consider using UUIDv7 instead.\n    If 'clock_seq' is given, it is used as the sequence number;\n    otherwise a random 14-bit sequence number is chosen.\"\"\"\n\n    global _last_v6_timestamp\n\n    nanoseconds = time.time_ns()\n    # 0x01b21dd213814000 is the number of 100-ns intervals between the\n    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.\n    timestamp = nanoseconds // 100 + 0x01B21DD213814000\n    if _last_v6_timestamp is not None and timestamp <= _last_v6_timestamp:\n        timestamp = _last_v6_timestamp + 1\n    _last_v6_timestamp = timestamp\n    if clock_seq is None:\n        clock_seq = secrets.randbits(14)  # instead of stable storage\n    node = secrets.randbits(48)\n    time_high_and_time_mid = (timestamp >> 12) & 0xFFFFFFFFFFFF\n    time_low_and_version = timestamp & 0x0FFF\n    uuid_int = time_high_and_time_mid << 80\n    uuid_int |= time_low_and_version << 64\n    uuid_int |= (clock_seq & 0x3FFF) << 48\n    uuid_int |= node\n    return UUID(int=uuid_int, version=6)\n\n\ndef uuid7() -> UUID:\n    r\"\"\"UUID version 7 features a time-ordered value field derived from the\n    widely implemented and well known Unix Epoch timestamp source, the\n    
number of milliseconds seconds since midnight 1 Jan 1970 UTC, leap\n    seconds excluded.  As well as improved entropy characteristics over\n    versions 1 or 6.\n    Implementations SHOULD utilize UUID version 7 over UUID version 1 and\n    6 if possible.\"\"\"\n\n    global _last_v7_timestamp\n\n    nanoseconds = time.time_ns()\n    if _last_v7_timestamp is not None and nanoseconds <= _last_v7_timestamp:\n        nanoseconds = _last_v7_timestamp + 1\n    _last_v7_timestamp = nanoseconds\n    timestamp_ms, timestamp_ns = divmod(nanoseconds, 10**6)\n    subsec = _subsec_encode(timestamp_ns)\n    subsec_a = subsec >> 8\n    subsec_b = subsec & 0xFF\n    rand = secrets.randbits(54)\n    uuid_int = (timestamp_ms & 0xFFFFFFFFFFFF) << 80\n    uuid_int |= subsec_a << 64\n    uuid_int |= subsec_b << 54\n    uuid_int |= rand\n    return UUID(int=uuid_int, version=7)\n"
  },
  {
    "path": "backend/bootstrap.py",
    "content": "import secrets\nimport asyncio\nfrom sqlmodel import select, func\nfrom sqlmodel.ext.asyncio.session import AsyncSession\nfrom colorama import Fore, Style\nimport click\n\nfrom app.core.db import get_db_async_session_context\nfrom app.models import User, ChatEngine\n\n\nasync def ensure_admin_user(\n    session: AsyncSession, email: str | None = None, password: str | None = None\n) -> None:\n    result = await session.exec(select(User).where(User.is_superuser == True))\n    user = result.first()\n    if not user:\n        from app.auth.users import create_user\n\n        admin_email = email or \"admin@example.com\"\n        admin_password = password or secrets.token_urlsafe(16)\n        user = await create_user(\n            session,\n            email=admin_email,\n            password=admin_password,\n            is_active=True,\n            is_verified=True,\n            is_superuser=True,\n        )\n        print(Fore.RED + \"\\n\" + \"!\" * 80)\n        print(\n            Fore.RED + \"[IMPORTANT] Admin user created with email: \"\n            f\"{admin_email} and password: {admin_password}\"\n        )\n        print(Fore.RED + \"!\" * 80 + \"\\n\" + Style.RESET_ALL)\n    else:\n        print(Fore.YELLOW + \"Admin user already exists, skipping...\")\n\n\nasync def reset_admin_password(\n    session: AsyncSession, new_password: str | None = None\n) -> None:\n    result = await session.exec(select(User).where(User.is_superuser == True))\n    user = result.first()\n    if not user:\n        print(Fore.YELLOW + \"Admin user does not exist, skipping reset password...\")\n    else:\n        from app.auth.users import update_user_password\n\n        admin_password = new_password or secrets.token_urlsafe(16)\n        updated_user = await update_user_password(\n            session,\n            user_id=user.id,\n            new_password=admin_password,\n        )\n        print(\n            Fore.GREEN + \"Admin user password reset SUCCESS!\\n\"\n     
       f\"email: {updated_user.email} \\n\"\n            f\"password: {admin_password}\" + Style.RESET_ALL\n        )\n\n\nasync def ensure_default_chat_engine(session: AsyncSession) -> None:\n    result = await session.scalar(func.count(ChatEngine.id))\n    if result == 0:\n        from app.rag.chat.config import ChatEngineConfig\n\n        chat_engine = ChatEngine(\n            name=\"default\",\n            engine_options=ChatEngineConfig().model_dump(),\n            is_default=True,\n        )\n        session.add(chat_engine)\n        await session.commit()\n        print(\"Default chat engine created.\")\n    else:\n        print(Fore.YELLOW + \"Default chat engine already exists, skipping...\")\n\n\nasync def bootstrap(\n    email: str | None = None, password: str | None = None, reset_password: bool = False\n) -> None:\n    async with get_db_async_session_context() as session:\n        await ensure_admin_user(session, email, password)\n        await ensure_default_chat_engine(session)\n        if reset_password:\n            await reset_admin_password(session, password)\n\n\n@click.command()\n@click.option(\n    \"--email\", default=None, help=\"Admin user email, default=admin@example.com\"\n)\n@click.option(\n    \"--password\", default=None, help=\"Admin user password, default=random generated\"\n)\n@click.option(\"--reset-password\", \"-r\", is_flag=True, help=\"Reset admin user password.\")\ndef main(email: str | None, password: str | None, reset_password: bool):\n    \"\"\"Bootstrap the application with optional admin credentials.\"\"\"\n    print(Fore.GREEN + \"Bootstrapping the application...\" + Style.RESET_ALL)\n    asyncio.run(bootstrap(email, password, reset_password))\n    print(Fore.GREEN + \"Bootstrapping completed.\" + Style.RESET_ALL)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "backend/dspy_compiled_program/decompose_query/demos.json",
    "content": "[\n    {\n      \"augmented\": true,\n      \"query\": \"I am designing a table based on TiDB's TTL feature, but when I try to create the table using a cluster created with Serverless, I get a `'TTL' is not supported on TiDB Serverless` error.\\n\\nI plan to use Dedicated on my production environment and Serverless on my development environment, so it would be helpful if the TTL feature could be used in a Serverless environment.\\n\\nI've read the documentation that says Serverless will support TTL features in the future, but is there a specific timeline for this?\\n\\nAlso, is it possible to prevent TTL syntax from causing errors in Serverless?\",\n      \"subquestions\": \"```json\\n{\\n  \\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"Why is the TTL feature not supported on TiDB Serverless?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the current limitations of TiDB Serverless regarding the TTL feature.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"Is there a specific timeline for when TiDB Serverless will support the TTL feature?\\\",\\n      \\\"reasoning\\\": \\\"The user wants to know when they can expect the TTL feature to be available in TiDB Serverless.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"How can I prevent TTL syntax from causing errors in TiDB Serverless?\\\",\\n      \\\"reasoning\\\": \\\"The user needs a workaround to avoid errors when using TTL syntax in their development environment.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the differences between TiDB Serverless and TiDB Dedicated regarding TTL feature support?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the differences will help the user plan their production and development environments effectively.\\\"\\n    }\\n  ]\\n}\\n```\"\n    },\n    {\n      \"augmented\": true,\n      \"query\": \"What are the feature changes in the latest version compared to v7.0 for TiDB?\",\n      \"subquestions\": \"```json\\n{\\n  
\\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"What is the latest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To compare feature changes, we need to identify the latest version of TiDB.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the new features introduced in the latest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To understand the changes, we need to know what new features have been added in the latest version.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What features were present in TiDB v7.0?\\\",\\n      \\\"reasoning\\\": \\\"To make a comparison, we need to know the features available in TiDB v7.0.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What features have been removed or deprecated in the latest version compared to v7.0?\\\",\\n      \\\"reasoning\\\": \\\"Identifying removed or deprecated features helps in understanding the differences between the two versions.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the improvements or changes in existing features from v7.0 to the latest version?\\\",\\n      \\\"reasoning\\\": \\\"To provide a comprehensive comparison, we need to know how existing features have been improved or changed.\\\"\\n    }\\n  ]\\n}\\n```\"\n    },\n    {\n      \"augmented\": true,\n      \"query\": \"summary the performance improvement from version 6.5 to newest version for TiDB\",\n      \"subquestions\": \"```json\\n{\\n  \\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"What is the newest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To understand the performance improvements, we first need to identify the latest version of TiDB.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What were the performance metrics and benchmarks for TiDB version 6.5?\\\",\\n      \\\"reasoning\\\": \\\"To compare performance improvements, we need to establish a baseline by understanding the performance metrics of version 6.5.\\\"\\n    },\\n    {\\n      
\\\"question\\\": \\\"What are the performance metrics and benchmarks for the newest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To identify improvements, we need to know the performance metrics of the latest version.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What specific performance improvements have been made from TiDB version 6.5 to the newest version?\\\",\\n      \\\"reasoning\\\": \\\"This question directly addresses the user's query by comparing the performance metrics between the two versions.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"Are there any notable features or optimizations in the newest version that contribute to performance improvements?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the features or optimizations that lead to performance improvements can provide deeper insights into the changes made.\\\"\\n    }\\n  ]\\n}\\n```\"\n    },\n    {\n      \"query\": \"tidb lighting to sync to serverless cluster,but the load command and the tidb-lighting tools dont have the tls config like --ssl-ca or --ca. so i can not sync to the full back data to the serverless\",\n      \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Sync data to a serverless cluster using TiDB Lighting.\\\",\\\"reasoning\\\":\\\"The user case what the user wants to achieve\\\"},{\\\"question\\\":\\\"How to configure TLS for TiDB Lightning?\\\",\\\"reasoning\\\":\\\"The basic question what the user itentionally asked.\\\"},{\\\"question\\\":\\\"The sync issue is caused by the lack of TLS configuration options for TiDB Lightning.\\\",\\\"reasoning\\\":\\\"The problem that the user is facing.\\\"}]}\"\n    },\n    {\n      \"query\": \"We are new to TiDB and don't quite understand the potential impact on our application architecture. We are using TiDB for audit logs and continue to direct traffic to TiDB. We noticed a sudden jump ID from 1 to 30,001. Are there any impacts? Do we need to address this? 
If we have 100 connections from several applications, what will happen? In summary, what should we do for Auto Increment or do nothing?\",\n      \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Why Auto Increment in TiDB causes a sudden increase in the ID values?\\\",\\\"reasoning\\\":\\\"The main concerns that the user itentionally asked.\\\"},{\\\"question\\\":\\\"How 100 connections from several applications affect TiDB, especially when the Auto Increment causes a sudden jump in ID values?\\\",\\\"reasoning\\\":\\\"The second most important question that the user itentionally asked.\\\"},{\\\"question\\\":\\\"TiDB is used for storing audit logs and receiving continuous traffic.\\\",\\\"reasoning\\\":\\\"The user case what the user wants to achieve\\\"}]}\"\n    },\n    {\n      \"query\": \"Upgrade TiDB Serverless to 7.4 or latest for enhanced MySQL 8.0 compatibility\",\n      \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"TiDB 7.4 or the latest version enhances compatibility with MySQL 8.0\\\",\\\"reasoning\\\":\\\"The reasoning why user wants to upgrade TiDB Serverless to 7.4 or latest for enhanced MySQL 8.0 compatibility\\\"},{\\\"question\\\":\\\"How to upgrade TiDB Serverless?\\\",\\\"reasoning\\\":\\\"The basic question what the user itentionally asked.\\\"}]}\"\n    },\n    {\n      \"query\": \"I am current using tidb serverless, but as my product grows, I really need a dalicated cluster. Is there a solution helps finish the migration?\",\n      \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"How to migrate from TiDB serverless to TiDB dedicated cluster?\\\",\\\"reasoning\\\":\\\"The main concern of the user.\\\"}]}\"\n    },\n    {\n      \"query\": \"Chat2query is returning an error message saying \\\"Query timeout expired\\\". 
Additionally, I am unable to locate this SQL query in the slow query log.\",\n      \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Chat2query is returning an error message saying 'Query timeout expired'.\\\",\\\"reasoning\\\":\\\"The main problem the user is facing.\\\"},{\\\"question\\\":\\\"The reason why not to locate the SQL query in the slow query log.\\\",\\\"reasoning\\\":\\\"The secondary problem the user is facing.\\\"}]}\"\n    }\n]"
  },
  {
    "path": "backend/dspy_compiled_program/decompose_query/program.json",
    "content": "{\n  \"prog\": {\n    \"traces\": [],\n    \"train\": [],\n    \"demos\": [\n      {\n        \"augmented\": true,\n        \"query\": \"I am designing a table based on TiDB's TTL feature, but when I try to create the table using a cluster created with Serverless, I get a `'TTL' is not supported on TiDB Serverless` error.\\n\\nI plan to use Dedicated on my production environment and Serverless on my development environment, so it would be helpful if the TTL feature could be used in a Serverless environment.\\n\\nI've read the documentation that says Serverless will support TTL features in the future, but is there a specific timeline for this?\\n\\nAlso, is it possible to prevent TTL syntax from causing errors in Serverless?\",\n        \"subquestions\": \"```json\\n{\\n  \\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"Why is the TTL feature not supported on TiDB Serverless?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the current limitations of TiDB Serverless regarding the TTL feature.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"Is there a specific timeline for when TiDB Serverless will support the TTL feature?\\\",\\n      \\\"reasoning\\\": \\\"The user wants to know when they can expect the TTL feature to be available in TiDB Serverless.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"How can I prevent TTL syntax from causing errors in TiDB Serverless?\\\",\\n      \\\"reasoning\\\": \\\"The user needs a workaround to avoid errors when using TTL syntax in their development environment.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the differences between TiDB Serverless and TiDB Dedicated regarding TTL feature support?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the differences will help the user plan their production and development environments effectively.\\\"\\n    }\\n  ]\\n}\\n```\"\n      },\n      {\n        \"augmented\": true,\n        \"query\": \"What are the feature changes in the latest 
version compared to v7.0 for TiDB?\",\n        \"subquestions\": \"```json\\n{\\n  \\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"What is the latest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To compare feature changes, we need to identify the latest version of TiDB.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the new features introduced in the latest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To understand the changes, we need to know what new features have been added in the latest version.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What features were present in TiDB v7.0?\\\",\\n      \\\"reasoning\\\": \\\"To make a comparison, we need to know the features available in TiDB v7.0.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What features have been removed or deprecated in the latest version compared to v7.0?\\\",\\n      \\\"reasoning\\\": \\\"Identifying removed or deprecated features helps in understanding the differences between the two versions.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the improvements or changes in existing features from v7.0 to the latest version?\\\",\\n      \\\"reasoning\\\": \\\"To provide a comprehensive comparison, we need to know how existing features have been improved or changed.\\\"\\n    }\\n  ]\\n}\\n```\"\n      },\n      {\n        \"augmented\": true,\n        \"query\": \"summary the performance improvement from version 6.5 to newest version for TiDB\",\n        \"subquestions\": \"```json\\n{\\n  \\\"questions\\\": [\\n    {\\n      \\\"question\\\": \\\"What is the newest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To understand the performance improvements, we first need to identify the latest version of TiDB.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What were the performance metrics and benchmarks for TiDB version 6.5?\\\",\\n      \\\"reasoning\\\": \\\"To compare performance improvements, we need to establish a baseline 
by understanding the performance metrics of version 6.5.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What are the performance metrics and benchmarks for the newest version of TiDB?\\\",\\n      \\\"reasoning\\\": \\\"To identify improvements, we need to know the performance metrics of the latest version.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"What specific performance improvements have been made from TiDB version 6.5 to the newest version?\\\",\\n      \\\"reasoning\\\": \\\"This question directly addresses the user's query by comparing the performance metrics between the two versions.\\\"\\n    },\\n    {\\n      \\\"question\\\": \\\"Are there any notable features or optimizations in the newest version that contribute to performance improvements?\\\",\\n      \\\"reasoning\\\": \\\"Understanding the features or optimizations that lead to performance improvements can provide deeper insights into the changes made.\\\"\\n    }\\n  ]\\n}\\n```\"\n      },\n      {\n        \"query\": \"tidb lighting to sync to serverless cluster,but the load command and the tidb-lighting tools dont have the tls config like --ssl-ca or --ca. so i can not sync to the full back data to the serverless\",\n        \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Sync data to a serverless cluster using TiDB Lighting.\\\",\\\"reasoning\\\":\\\"The user case what the user wants to achieve\\\"},{\\\"question\\\":\\\"How to configure TLS for TiDB Lightning?\\\",\\\"reasoning\\\":\\\"The basic question what the user itentionally asked.\\\"},{\\\"question\\\":\\\"The sync issue is caused by the lack of TLS configuration options for TiDB Lightning.\\\",\\\"reasoning\\\":\\\"The problem that the user is facing.\\\"}]}\"\n      },\n      {\n        \"query\": \"We are new to TiDB and don't quite understand the potential impact on our application architecture. We are using TiDB for audit logs and continue to direct traffic to TiDB. 
We noticed a sudden jump ID from 1 to 30,001. Are there any impacts? Do we need to address this? If we have 100 connections from several applications, what will happen? In summary, what should we do for Auto Increment or do nothing?\",\n        \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Why Auto Increment in TiDB causes a sudden increase in the ID values?\\\",\\\"reasoning\\\":\\\"The main concerns that the user itentionally asked.\\\"},{\\\"question\\\":\\\"How 100 connections from several applications affect TiDB, especially when the Auto Increment causes a sudden jump in ID values?\\\",\\\"reasoning\\\":\\\"The second most important question that the user itentionally asked.\\\"},{\\\"question\\\":\\\"TiDB is used for storing audit logs and receiving continuous traffic.\\\",\\\"reasoning\\\":\\\"The user case what the user wants to achieve\\\"}]}\"\n      },\n      {\n        \"query\": \"Upgrade TiDB Serverless to 7.4 or latest for enhanced MySQL 8.0 compatibility\",\n        \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"TiDB 7.4 or the latest version enhances compatibility with MySQL 8.0\\\",\\\"reasoning\\\":\\\"The reasoning why user wants to upgrade TiDB Serverless to 7.4 or latest for enhanced MySQL 8.0 compatibility\\\"},{\\\"question\\\":\\\"How to upgrade TiDB Serverless?\\\",\\\"reasoning\\\":\\\"The basic question what the user itentionally asked.\\\"}]}\"\n      },\n      {\n        \"query\": \"I am current using tidb serverless, but as my product grows, I really need a dalicated cluster. Is there a solution helps finish the migration?\",\n        \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"How to migrate from TiDB serverless to TiDB dedicated cluster?\\\",\\\"reasoning\\\":\\\"The main concern of the user.\\\"}]}\"\n      },\n      {\n        \"query\": \"Chat2query is returning an error message saying \\\"Query timeout expired\\\". 
Additionally, I am unable to locate this SQL query in the slow query log.\",\n        \"subquestions\": \"{\\\"questions\\\":[{\\\"question\\\":\\\"Chat2query is returning an error message saying 'Query timeout expired'.\\\",\\\"reasoning\\\":\\\"The main problem the user is facing.\\\"},{\\\"question\\\":\\\"The reason why not to locate the SQL query in the slow query log.\\\",\\\"reasoning\\\":\\\"The secondary problem the user is facing.\\\"}]}\"\n      }\n    ],\n    \"signature\": {\n      \"instructions\": \"You are an expert in knowledge base graph construction, specializing in building comprehensive knowledge graphs.\\nYour current task is to deconstruct the user's query into a series of step-by-step questions.\\n\\n## Instructions:\\n\\n1. Dependency Analysis:\\n\\n    - Analyze the user's query to identify the underlying dependencies and relationships between different components.\\n    - Construct a dependency graph that visually represents these relationships.\\n\\n2. Question Breakdown: Divide the query into a sequence of step-by-step questions necessary to address the main query comprehensively.\\n\\n3. Provide Reasoning: Explain the rationale behind each question.\\n\\n4. Constraints:\\n    - Limit the output to no more than 5 questions to maintain focus and relevance.\\n    - Ensure accuracy by reflecting the user's true intentions based on the provided query.\\n    - Ground all questions in factual information derived directly from the user's input.\\n\\nPlease only response in JSON format.\",\n      \"fields\": [\n        {\n          \"prefix\": \"Query:\",\n          \"description\": \"The query text to extract the user's step-by-step questions.\"\n        },\n        {\n          \"prefix\": \"Subquestions:\",\n          \"description\": \"Representation of the user's step-by-step questions extracted from the query.\"\n        }\n      ]\n    },\n    \"lm\": null\n  },\n  \"metadata\": {\n    \"dependency_versions\": {}\n  }\n}"
  },
  {
    "path": "backend/dspy_program.py",
    "content": "import os\n\nimport dspy\nfrom app.rag.question_gen.query_decomposer import DecomposeQueryModule\n\n\ndef save_decompose_query_program():\n    dspy_lm = dspy.LM(model=\"gpt-4o-mini\", api_key=os.getenv(\"OPENAI_API_KEY\"))\n    module = DecomposeQueryModule(dspy_lm)\n    module.save(\"dspy_compiled_program/decompose_query/program.json\")\n\n\nif __name__ == \"__main__\":\n    save_decompose_query_program()\n"
  },
  {
    "path": "backend/local_embedding_reranker/.dockerignore",
    "content": ".venv"
  },
  {
    "path": "backend/local_embedding_reranker/Dockerfile",
    "content": "FROM python:3.11.9-slim\n\nWORKDIR /app\n\nCOPY requirements.txt /app/requirements.txt\nRUN PYTHONDONTWRITEBYTECODE=1 pip install --no-cache-dir -r /app/requirements.txt\n\nCOPY . /app/\n\nENV PYTHONPATH=/app\n\nEXPOSE 5001\n\nCMD [\"uvicorn\", \"main:app\", \"--port\", \"5001\", \"--host\", \"0.0.0.0\"]"
  },
  {
    "path": "backend/local_embedding_reranker/main.py",
    "content": "import logging\nimport uvicorn\nfrom pydantic import BaseModel\nfrom fastapi import FastAPI, APIRouter\nfrom sentence_transformers import SentenceTransformer, CrossEncoder\nfrom contextlib import asynccontextmanager\nfrom environs import Env\n\nenv = Env()\nenv.read_env()\n\nlogging.basicConfig(\n    format=\"%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s\",\n    level=logging.INFO,\n)\nlogger = logging.getLogger(__name__)\n\n\nPRE_LOAD_DEFAULT_EMBEDDING_MODEL = env.bool(\n    \"PRE_LOAD_DEFAULT_EMBEDDING_MODEL\", default=True\n)\nPRE_LOAD_DEFAULT_RERANKER_MODEL = env.bool(\n    \"PRE_LOAD_DEFAULT_RERANKER_MODEL\", default=False\n)\nDEFAULT_EMBEDDING_MODEL = env.str(\"DEFAULT_EMBEDDING_MODEL\", default=\"BAAI/bge-m3\")\nDEFAULT_RERANKER_MODEL = env.str(\n    \"DEFAULT_RERANKER_MODEL\", default=\"BAAI/bge-reranker-v2-m3\"\n)\nrouter = APIRouter()\n\n\n@router.get(\"/healthz\")\ndef healthz():\n    return \"OK\"\n\n\nEMBEDDING_MODEL_DICT: dict[str, SentenceTransformer] = {}\nRERANKER_MODEL_DICT: dict[str, CrossEncoder] = {}\n\n\ndef get_embedding_model(model_name: str) -> SentenceTransformer:\n    global EMBEDDING_MODEL_DICT\n    embed_model = EMBEDDING_MODEL_DICT.get(model_name)\n    if not embed_model:\n        embed_model = SentenceTransformer(\n            model_name_or_path=model_name,\n            trust_remote_code=True,\n        )\n        EMBEDDING_MODEL_DICT[model_name] = embed_model\n    return embed_model\n\n\ndef get_reranker_model(model_name: str) -> CrossEncoder:\n    global RERANKER_MODEL_DICT\n    reranker_model = RERANKER_MODEL_DICT.get(model_name)\n    if not reranker_model:\n        reranker_model = CrossEncoder(\n            model_name=model_name,\n            automodel_args={\"torch_dtype\": \"auto\"},\n            trust_remote_code=True,\n        )\n        RERANKER_MODEL_DICT[model_name] = reranker_model\n    return reranker_model\n\n\nclass EmbeddingRequest(BaseModel):\n    sentences: list[str]\n    model: str 
= DEFAULT_EMBEDDING_MODEL\n    normalize_embeddings: bool = True\n\n\nclass EmbeddingResponse(BaseModel):\n    model: str\n    embeddings: list[list]\n\n\n@router.post(\"/embedding\")\ndef get_texts_embedding(request: EmbeddingRequest) -> EmbeddingResponse:\n    embed_model = get_embedding_model(model_name=request.model)\n    embeddings = embed_model.encode(\n        sentences=request.sentences,\n        normalize_embeddings=request.normalize_embeddings,\n    )\n    return EmbeddingResponse(\n        model=request.model,\n        embeddings=embeddings.tolist(),\n    )\n\n\nclass RerankerRequest(BaseModel):\n    model: str = DEFAULT_RERANKER_MODEL\n    query: str\n    passages: list[str]\n\n\nclass RerankerResponse(BaseModel):\n    model: str\n    scores: list[float]\n\n\n@router.post(\"/reranker\")\ndef reranker_texts(request: RerankerRequest) -> RerankerResponse:\n    reranker_model = get_reranker_model(request.model)\n    sentence_pairs = [(request.query, p) for p in request.passages]\n    scores = reranker_model.predict(sentence_pairs, convert_to_tensor=True)\n    return RerankerResponse(model=request.model, scores=scores.tolist())\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    if PRE_LOAD_DEFAULT_EMBEDDING_MODEL:\n        logger.info(f\"Loading default embedding model: {DEFAULT_EMBEDDING_MODEL}\")\n        get_embedding_model(DEFAULT_EMBEDDING_MODEL)\n        logger.info(\"Default embedding model loaded\")\n    if PRE_LOAD_DEFAULT_RERANKER_MODEL:\n        logger.info(f\"Loading default reranker model: {DEFAULT_RERANKER_MODEL}\")\n        get_reranker_model(DEFAULT_RERANKER_MODEL)\n        logger.info(\"Default reranker model loaded\")\n    yield\n\n\napp = FastAPI(lifespan=lifespan)\napp.include_router(router=router, prefix=\"/api/v1\")\n\n\nif __name__ == \"__main__\":\n    uvicorn.run(\"main:app\", host=\"0.0.0.0\", port=5001, reload=True)\n"
  },
  {
    "path": "backend/local_embedding_reranker/requirements.txt",
    "content": "fastapi==0.112.2\nuvicorn==0.30.6\nsentence_transformers==3.0.1\neinops==0.8.0\nenvirons==11.1.0"
  },
  {
    "path": "backend/main.py",
    "content": "import warnings\nimport logging\nfrom logging.config import dictConfig\nfrom contextlib import asynccontextmanager\n\nimport click\nimport sentry_sdk\nimport uvicorn\nfrom fastapi import FastAPI, Request, Response\nfrom fastapi.routing import APIRoute\nfrom starlette.middleware.cors import CORSMiddleware\nfrom dotenv import load_dotenv\n\nfrom app.api.main import api_router\nfrom app.core.config import settings, Environment\nfrom app.site_settings import SiteSetting\nfrom app.utils.uuid6 import uuid7\n\ndictConfig(\n    {\n        \"version\": 1,\n        \"disable_existing_loggers\": False,\n        \"formatters\": {\n            \"default\": {\n                \"format\": \"%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s\",\n            },\n        },\n        \"handlers\": {\n            \"console\": {\n                \"class\": \"logging.StreamHandler\",\n                \"formatter\": \"default\",\n            },\n        },\n        \"root\": {\n            \"level\": logging.INFO\n            if settings.ENVIRONMENT != Environment.LOCAL\n            else logging.DEBUG,\n            \"handlers\": [\"console\"],\n        },\n        \"loggers\": {\n            \"uvicorn.error\": {\n                \"level\": \"ERROR\",\n                \"handlers\": [\"console\"],\n                \"propagate\": False,\n            },\n            \"uvicorn.access\": {\n                \"level\": \"INFO\",\n                \"handlers\": [\"console\"],\n                \"propagate\": False,\n            },\n        },\n    }\n)\n\n\nlogger = logging.getLogger(__name__)\n\n\nload_dotenv()\n\n\ndef custom_generate_unique_id(route: APIRoute) -> str:\n    return f\"{route.tags[0]}-{route.name}\"\n\n\nif settings.SENTRY_DSN and settings.ENVIRONMENT != \"local\":\n    sentry_sdk.init(\n        dsn=str(settings.SENTRY_DSN),\n        enable_tracing=True,\n        traces_sample_rate=settings.SENTRY_TRACES_SAMPLE_RATE,\n        
profiles_sample_rate=settings.SENTRY_PROFILES_SAMPLE_RATE,\n    )\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    SiteSetting.update_db_cache()\n    yield\n\n\napp = FastAPI(\n    title=settings.PROJECT_NAME,\n    openapi_url=f\"{settings.API_V1_STR}/openapi.json\",\n    generate_unique_id_function=custom_generate_unique_id,\n    lifespan=lifespan,\n)\n\n\n# Set all CORS enabled origins\nif settings.BACKEND_CORS_ORIGINS:\n    app.add_middleware(\n        CORSMiddleware,\n        allow_origins=[\n            str(origin).strip(\"/\") for origin in settings.BACKEND_CORS_ORIGINS\n        ],\n        allow_origin_regex=settings.BACKEND_CORS_ORIGIN_REGEXP,\n        allow_credentials=True,\n        allow_methods=[\"*\"],\n        allow_headers=[\"*\"],\n    )\n\n\n@app.middleware(\"http\")\nasync def identify_browser(request: Request, call_next):\n    browser_id = request.cookies.get(settings.BROWSER_ID_COOKIE_NAME)\n    has_browser_id = bool(browser_id)\n    if not browser_id:\n        browser_id = uuid7()\n    request.state.browser_id = browser_id\n    response: Response = await call_next(request)\n    if not has_browser_id:\n        response.set_cookie(\n            settings.BROWSER_ID_COOKIE_NAME,\n            browser_id,\n            max_age=settings.BROWSER_ID_COOKIE_MAX_AGE,\n        )\n    return response\n\n\napp.include_router(api_router, prefix=settings.API_V1_STR)\n\n\n@click.group(context_settings={\"max_content_width\": 150})\ndef cli():\n    pass\n\n\n@cli.command()\n@click.option(\"--host\", default=\"127.0.0.1\", help=\"Host, default=127.0.0.1\")\n@click.option(\"--port\", default=3000, help=\"Port, default=3000\")\n@click.option(\"--workers\", default=4)\ndef runserver(host, port, workers):\n    warnings.warn(\n        \"This command will start the server in development mode, do not use it in production.\"\n    )\n    uvicorn.run(\n        \"main:app\",\n        host=host,\n        port=port,\n        reload=True,\n        
log_level=\"debug\",\n        workers=workers,\n    )\n\n\n@cli.command()\n@click.option(\n    \"--dataset\", default=\"regression\", help=\"Dataset name, default=regression\"\n)\n@click.option(\n    \"--llm-provider\",\n    default=\"openai\",\n    help=\"LLM provider, default=openai, options=[openai, gemini]\",\n)\n@click.option(\"--run-name\", default=None, help=\"Run name, default=None\")\n@click.option(\n    \"--tidb-ai-chat-engine\",\n    default=\"default\",\n    help=\"TiDB AI chat engine, default=default\",\n)\ndef runeval(dataset, llm_provider, run_name, tidb_ai_chat_engine):\n    from app.evaluation.evals import Evaluation\n\n    eval = Evaluation(\n        dataset_name=dataset,\n        llm_provider=llm_provider,\n        run_name=run_name,\n        tidb_ai_chat_engine=tidb_ai_chat_engine,\n    )\n    eval.run()\n\n\n@cli.command()\n@click.option(\n    \"--csv\",\n    default=\"autoflow_dataset.csv\",\n    help=\"Dataset CSV file name that contains two columns `query` and `reference`, default='autoflow_dataset.csv'\",\n)\n@click.option(\n    \"--llm-provider\",\n    default=\"openai\",\n    help=\"LLM provider, default=openai, options=[openai, gemini]\",\n)\n@click.option(\"--run-name\", default=None, help=\"Run name, default=None\")\n@click.option(\n    \"--tidb-ai-chat-engine\",\n    default=\"default\",\n    help=\"TiDB AI chat engine, default=default\",\n)\n@click.option(\"--run-size\", default=30, help=\"Run size, default=30\")\ndef runeval_dataset(csv, llm_provider, run_name, tidb_ai_chat_engine, run_size):\n    from app.evaluation.evals import Evaluation\n\n    evaluation = Evaluation(\n        dataset_name=\"customize\",\n        llm_provider=llm_provider,\n        run_name=run_name,\n        tidb_ai_chat_engine=tidb_ai_chat_engine,\n    )\n    evaluation.runeval_dataset(csv_dataset=csv, run_size=run_size)\n\n\n@cli.command()\n@click.option(\"--query\", default=None, help=\"query\")\ndef generate_answer_by_tidb_ai(query: str):\n    from 
app.evaluation.evals import Evaluation\n\n    evaluation = Evaluation(\n        dataset_name=\"customize\",\n        llm_provider=\"openai\",\n        run_name=None,\n        tidb_ai_chat_engine=\"default\",\n    )\n\n    print(\n        evaluation.generate_answer_by_tidb_ai(\n            messages=[{\"role\": \"user\", \"content\": query}]\n        )\n    )\n\n\nif __name__ == \"__main__\":\n    cli()\n"
  },
  {
    "path": "backend/prestart.sh",
    "content": "#! /usr/bin/env bash\n"
  },
  {
    "path": "backend/pyproject.toml",
    "content": "[project]\nname = \"tidb-ai-backend\"\nversion = \"0.2.9\"\ndescription = \"The Backend Server for TiDB.AI\"\nauthors = [\n    { name = \"wd0517\", email = \"me@wangdi.ink\" },\n    { name = \"mini256\", email = \"minianter@gmail.com\" }\n]\nreadme = \"README.md\"\nrequires-python = \">= 3.10\"\ndependencies = [\n    \"fastapi>=0.115.6\",\n    \"fastapi-cli>=0.0.5\",\n    \"fastapi-users>=13.0.0\",\n    \"fastapi-pagination>=0.12.25\",\n    \"fastapi-users-db-sqlmodel>=0.3.0\",\n    \"sqlmodel==0.0.19\",\n    \"pymysql>=1.1.1\",\n    \"asyncmy>=0.2.9\",\n    \"tidb-vector>=0.0.14\",\n    \"alembic>=1.14.0\",\n    \"pydantic>=2.10.5\",\n    # Update Check: https://github.com/pydantic/pydantic/issues/8061\n    \"pydantic-settings>=2.3.3\",\n    \"redis>=5.0.5\",\n    \"celery>=5.4.0\",\n    \"flower>=2.0.1\",\n    \"httpx-oauth>=0.14.1\",\n    \"uvicorn>=0.30.3\",\n    \"gunicorn>=22.0.0\",\n    \"python-dotenv>=1.0.1\",\n    \"sentry-sdk>=2.5.1\",\n    \"langfuse>=2.59.1\",\n    \"langchain-openai>=0.2.9\",\n    # Pin ragas to 0.2.6 to avoid async loop error:\n    # https://github.com/explodinggradients/ragas/issues/1819\n    \"ragas==0.2.6\",\n    \"deepeval>=0.21.73\",\n    \"click>=8.1.7\",\n    \"tenacity~=8.4.0\",\n    \"retry>=0.9.2\",\n    \"deepdiff>=7.0.1\",\n    \"colorama>=0.4.6\",\n    \"jinja2>=3.1.4\",\n    \"pyyaml>=6.0.1\",\n    \"playwright>=1.45.1\",\n    \"markdownify>=0.13.1\",\n    \"pypdf>=4.3.1\",\n    \"python-docx>=1.1.2\",\n    \"python-pptx>=1.0.2\",\n    \"openpyxl>=3.1.5\",\n    \"llama-index>=0.12.16\",\n    \"llama-index-llms-openai>=0.6.12\",\n    \"llama-index-llms-openai-like>=0.5.3\",\n    \"llama-index-llms-bedrock-converse>=0.4.15\",\n    \"llama-index-llms-ollama>=0.5.0\",\n    \"llama-index-llms-google-genai>=0.1.6\",\n    \"llama-index-llms-azure-openai>=0.3.0\",\n    \"llama-index-embeddings-ollama>=0.5.0\",\n    \"llama-index-embeddings-jinaai>=0.4.0\",\n    \"llama-index-embeddings-cohere>=0.4.0\",\n    
\"llama-index-embeddings-bedrock>=0.4.0\",\n    \"llama-index-embeddings-azure-openai>=0.3.0\",\n    \"llama-index-postprocessor-jinaai-rerank>=0.3.0\",\n    \"llama-index-postprocessor-cohere-rerank>=0.3.0\",\n    \"llama-index-postprocessor-xinference-rerank>=0.2.0\",\n    \"llama-index-postprocessor-bedrock-rerank>=0.3.0\",\n    \"llama-index-llms-vertex>=0.4.2\",\n    \"socksio>=1.0.0\",\n    \"dspy>=2.6.21\",\n    \"litellm>=1.77.5\",\n]\n\n\n[dependency-groups]\ndev = [\n    \"pre-commit>=4.2.0\",\n    \"pytest-asyncio>=0.25.3\",\n    \"ruff>=0.11.2\",\n]\n\n# Lint\n\n[tool.ruff.lint]\nignore = [\"E711\", \"E712\", \"F811\",  \"F841\"]\n\n# Test\n\n[tool.pytest.ini_options]\nlog_cli = true\nlog_cli_level = \"INFO\"\nlog_cli_format = \"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"\nlog_cli_date_format = \"%Y-%m-%d %H:%M:%S\"\nasyncio_mode = \"auto\"\nenv_files = [\"tests/.env\"]\n"
  },
  {
    "path": "backend/supervisord.conf",
    "content": "[supervisord]\nnodaemon=true\nlogfile=/var/log/supervisord.log\n\n[program:celery_worker]\ncommand=celery -A app.celery worker -n worker-default@%%h -Q default --concurrency=5 --loglevel=INFO --logfile=/var/log/celery_worker.log\ndirectory=/app\nstdout_logfile=/var/log/celery_worker_supervisor.log\nstdout_logfile_maxbytes=52428800\nredirect_stderr=true\nautorestart=true\n\n[program:evaluation_worker]\ncommand=celery -A app.celery worker -n worker-evaluation@%%h -Q evaluation --pool=solo --loglevel=INFO --logfile=/var/log/evaluation_worker.log\ndirectory=/app\nstdout_logfile=/var/log/evaluation_worker_supervisor.log\nstdout_logfile_maxbytes=52428800\nredirect_stderr=true\nautorestart=true\n\n[program:celery_flower]\ncommand=celery -A app.celery flower --address=0.0.0.0 --port=5555\ndirectory=/app\nstdout_logfile=/var/log/celery_flower_supervisor.log\nstdout_logfile_maxbytes=52428800\nredirect_stderr=true\nautorestart=true\n\n# Pushes all logs from the above programs to stdout\n# No log rotation here, since it's stdout it's handled by the Docker container loglevel\n# To be standard across all the services\n[program:log-redirect-handler]\ncommand=tail -qF /var/log/celery_worker.log /var/log/celery_worker_supervisor.log\nstdout_logfile=/dev/stdout\nstdout_logfile_maxbytes=0\nredirect_stderr=true\nautorestart=true"
  },
  {
    "path": "backend/tests/__init__.py",
    "content": ""
  },
  {
    "path": "backend/tests/conftest.py",
    "content": "import pytest\nfrom dotenv import load_dotenv\n\n\n@pytest.fixture(scope=\"session\", autouse=True)\ndef env():\n    print(\"Loading environment variables\")\n    load_dotenv()\n"
  },
  {
    "path": "backend/tests/test_dynamic_models.py",
    "content": "import threading\nfrom app.models.entity import get_dynamic_entity_model\nfrom app.models.relationship import get_dynamic_relationship_model\nfrom app.models.chunk import get_dynamic_chunk_model\n\n\ndef dynamic_model_creation(dim, ns):\n    entity_model = get_dynamic_entity_model(dim, ns)\n    relationship_model = get_dynamic_relationship_model(dim, ns, entity_model)\n    chunk_model = get_dynamic_chunk_model(dim, ns)\n    return entity_model, relationship_model, chunk_model\n\n\ndef test_concurrent_dynamic_model_creation():\n    results = [None] * 10\n    threads = []\n    for i in range(10):\n        t = threading.Thread(\n            target=lambda idx: results.__setitem__(\n                idx, dynamic_model_creation(128, \"test\")\n            ),\n            args=(i,),\n        )\n        threads.append(t)\n    for t in threads:\n        t.start()\n    for t in threads:\n        t.join()\n\n    # Ensure each model is created only once across all threads\n    entity_models, relationship_models, chunk_models = zip(*results)\n    assert all(m is entity_models[0] for m in entity_models)\n    assert all(m is relationship_models[0] for m in relationship_models)\n    assert all(m is chunk_models[0] for m in chunk_models)\n"
  },
  {
    "path": "backend/tests/test_llms.py",
    "content": "import json\nimport os\nimport logging\nfrom logging import getLogger\n\nimport pytest\nimport dspy\n\nfrom litellm import verbose_logger\nfrom llama_index.core import PromptTemplate\nfrom llama_index.core.base.llms.base import BaseLLM\n\nfrom app.rag.indices.knowledge_graph.extractor import Extractor\nfrom app.rag.llms.provider import LLMProvider\nfrom app.rag.llms.resolver import resolve_llm\nfrom app.rag.question_gen.query_decomposer import QueryDecomposer\nfrom app.rag.llms.dspy import get_dspy_lm_by_llama_llm\n\n\nquestion = \"Is TiDB open source? (Yes/No)\"\ncontent = \"\"\"\nTiDB is a distributed database that you can use the MySQL client to connect to.\n\"\"\"\n\nos.environ[\"LITELLM_LOG\"] = \"DEBUG\"\nverbose_logger.setLevel(logging.WARN)\n\nlogger = getLogger(__name__)\n\n\ndef check_llm_answer_simple_question(llm: BaseLLM):\n    prompt = PromptTemplate(question)\n    output = llm.predict(prompt)\n\n    assert \"yes\" in output.lower()\n\n    logger.info(f\"Generated answer: \\n{output}\")\n\n\ndef check_dspy_lm_decompose_question(lm: dspy.LM):\n    decomposer = QueryDecomposer(lm)\n    subquestions = decomposer.decompose(\"What is TiDB\").questions\n\n    assert len(subquestions) >= 1\n\n    questions = [q.question for q in subquestions]\n    logger.info(f\"Generated sub-question: \\n{questions}\")\n\n\ndef check_dspy_lm_extract_graph(lm: dspy.LM):\n    extractor = Extractor(lm)\n    kg = extractor.forward(content).knowledge\n\n    assert len(kg.entities) >= 2\n    assert len(kg.relationships) >= 1\n\n    entities = [f\"{e.name}: {e.description}\" for e in kg.entities]\n    relationships = [\n        f\"{r.source_entity} -> {r.relationship_desc} -> {r.target_entity}\"\n        for r in kg.relationships\n    ]\n    logger.info(f\"Extracted entities: \\n{entities}\")\n    logger.info(f\"Extracted relationships: \\n{relationships}\")\n\n\ndef test_openai():\n    llm = resolve_llm(\n        provider=LLMProvider.OPENAI,\n        
model=\"gpt-4o-mini\",\n        config={},\n        credentials=os.getenv(\"OPENAI_API_KEY\"),\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n\n\n@pytest.mark.skipif(\n    os.getenv(\"GITHUB_ACTIONS\"), reason=\"ollama is not available on GitHub Actions\"\n)\ndef test_ollama():\n    llm = resolve_llm(\n        provider=LLMProvider.OLLAMA,\n        model=\"gemma3:4b\",\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n\n\ndef test_gitee_ai():\n    llm = resolve_llm(\n        provider=LLMProvider.GITEEAI,\n        model=\"Qwen2.5-72B-Instruct\",\n        credentials=os.getenv(\"GITEEAI_API_KEY\"),\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n\n\ndef test_bedrock():\n    llm = resolve_llm(\n        provider=LLMProvider.BEDROCK,\n        model=\"meta.llama3-1-70b-instruct-v1:0\",\n        credentials={\n            \"aws_access_key_id\": os.getenv(\"AWS_ACCESS_KEY_ID\"),\n            \"aws_secret_access_key\": os.getenv(\"AWS_SECRET_ACCESS_KEY\"),\n            \"aws_region_name\": os.getenv(\"AWS_REGION_NAME\"),\n        },\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n\n\ndef test_vertex_ai():\n    llm = resolve_llm(\n        provider=LLMProvider.VERTEX_AI,\n        model=\"gemini-2.0-flash-001\",\n        credentials=json.loads(os.getenv(\"GOOGLE_SERVICE_ACCOUNT_CREDENTIALS\")),\n        config={\"location\": \"us-west1\"},\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    
check_dspy_lm_extract_graph(lm)\n\n\ndef test_gemini():\n    llm = resolve_llm(\n        provider=LLMProvider.GEMINI,\n        model=\"models/gemini-2.0-flash-001\",\n        credentials=os.getenv(\"GEMINI_API_KEY\"),\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n\n\ndef test_azure_ai():\n    llm = resolve_llm(\n        provider=LLMProvider.AZURE_OPENAI,\n        model=\"gpt-4o-mini\",\n        credentials=os.getenv(\"AZURE_AI_API_KEY\"),\n        config={\n            \"azure_endpoint\": os.getenv(\"AZURE_AI_ENDPOINT\"),\n            \"engine\": \"gpt-4o\",\n            \"api_version\": \"2025-01-01-preview\",\n        },\n    )\n    check_llm_answer_simple_question(llm)\n\n    lm = get_dspy_lm_by_llama_llm(llm)\n    check_dspy_lm_decompose_question(lm)\n    check_dspy_lm_extract_graph(lm)\n"
  },
  {
    "path": "core/.cursor/rules/code-style.mdc",
    "content": "---\ndescription: \nglobs: \nalwaysApply: true\n---\n\n# Code Style\n\n- Always sort the imports with the order (standard, third-party, local)\n"
  },
  {
    "path": "core/.gitignore",
    "content": "# Python generated files\n__pycache__/\n*.py[oc]\nbuild/\ndist/\nwheels/\n*.egg-info\n.mypy_cache\n\n# Environment\n.env\n\n# Virtual environment\n.venv\n\n# IDE\n.idea\n\n# pytest\n.pytest_cache"
  },
  {
    "path": "core/.python-version",
    "content": "3.12.4\n"
  },
  {
    "path": "core/Makefile",
    "content": "\ninstall:\n\t@pip install uv\n\t@uv sync\n\ntest:\n\t@PYTHONPATH=$(PWD) uv run pytest tests\n\nbuild:\n\t@PYTHONPATH=$(PWD) uv build\n\npublish:\n\t@uv publish\n\nlint:\n\t@uv tool run ruff check\n\nformat:\n\t@uv tool run format "
  },
  {
    "path": "core/README.md",
    "content": "# Autoflow\n\nA unified storage layer for AI agents and RAG applications."
  },
  {
    "path": "core/autoflow/__init__.py",
    "content": "import os\nfrom .main import Autoflow\n\nif os.getenv(\"LITELLM_LOCAL_MODEL_COST_MAP\") is None:\n    os.environ[\"LITELLM_LOCAL_MODEL_COST_MAP\"] = \"True\"\n\n__all__ = [\n    \"Autoflow\",\n]\n"
  },
  {
    "path": "core/autoflow/chunkers/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/chunkers/base.py",
    "content": "from abc import abstractmethod\n\nfrom autoflow.types import BaseComponent\nfrom autoflow.storage.doc_store import Document\n\n\nclass Chunker(BaseComponent):\n    @abstractmethod\n    def chunk(self, document: Document) -> Document:\n        raise NotImplementedError\n"
  },
  {
    "path": "core/autoflow/chunkers/helper.py",
    "content": "from autoflow.chunkers.base import Chunker\nfrom autoflow.data_types import DataType\n\n\ndef get_chunker_for_datatype(datatype: DataType) -> Chunker:\n    if datatype in [DataType.MARKDOWN, DataType.HTML, DataType.PDF]:\n        from autoflow.chunkers.text import TextChunker\n\n        return TextChunker()\n    else:\n        raise ValueError(f\"Unsupported data type: {datatype}\")\n"
  },
  {
    "path": "core/autoflow/chunkers/text.py",
    "content": "from typing import Optional\n\nfrom autoflow.chunkers.base import Chunker\nfrom autoflow.configs.chunkers.text import TextChunkerConfig\nfrom autoflow.storage.doc_store import Document, Chunk\n\n\nclass TextChunker(Chunker):\n    \"\"\"Chunker for text.\"\"\"\n\n    def __init__(self, config: Optional[TextChunkerConfig] = TextChunkerConfig()):\n        super().__init__()\n        from llama_index.core.node_parser import SentenceSplitter\n\n        self._splitter = SentenceSplitter(\n            chunk_size=config.chunk_size,\n            chunk_overlap=config.chunk_overlap,\n        )\n\n    def chunk(self, document: Document) -> Document:\n        texts = self._splitter.split_text(document.content)\n        document.chunks = [Chunk(text=text) for text in texts]\n        return document\n"
  },
  {
    "path": "core/autoflow/configs/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/configs/chunkers/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/configs/chunkers/base.py",
    "content": "from typing import Optional, Dict\n\nfrom pydantic import BaseModel, Field, model_validator\n\n\nclass ChunkerConfig(BaseModel):\n    provider: str = Field(\n        description=\"Provider of the chunker (e.g., 'text')\",\n        default=\"openai\",\n    )\n    config: Optional[Dict] = Field(\n        description=\"Configuration for the specific chunker\",\n        default=None,\n    )\n\n    _provider_configs: Dict[str, str] = {\n        \"text\": \"TextChunkerConfig\",\n    }\n\n    @model_validator(mode=\"after\")\n    def validate_and_create_config(self) -> \"ChunkerConfig\":\n        provider = self.provider\n        config = self.config\n\n        if provider not in self._provider_configs:\n            raise ValueError(f\"Unsupported chunker provider: {provider}\")\n\n        module = __import__(\n            f\"autoflow.configs.chunkers.{provider}\",\n            fromlist=[self._provider_configs[provider]],\n        )\n        config_class = getattr(module, self._provider_configs[provider])\n\n        if config is None:\n            config = {}\n\n        if not isinstance(config, dict):\n            if not isinstance(config, config_class):\n                raise ValueError(\n                    f\"Invalid config type for chunker provider: {provider}\"\n                )\n            return self\n\n        self.config = config_class(**config)\n        return self\n"
  },
  {
    "path": "core/autoflow/configs/chunkers/text.py",
    "content": "from pydantic import BaseModel, Field\n\n\nDEFAULT_CHUNK_SIZE = 1024  # tokens\nDEFAULT_CHUNK_OVERLAP = 20  # tokens\n\n\nclass TextChunkerConfig(BaseModel):\n    chunk_size: int = Field(default=DEFAULT_CHUNK_SIZE, description=\"Chunk size\")\n    chunk_overlap: int = Field(\n        default=DEFAULT_CHUNK_OVERLAP, description=\"Chunk overlap\"\n    )\n"
  },
  {
    "path": "core/autoflow/configs/db.py",
    "content": "from typing import Optional\n\nfrom pydantic import BaseModel, Field\n\n\nclass DatabaseConfig(BaseModel):\n    database_url: Optional[str] = Field(\n        description=\"Database connection string\",\n        default=None,\n    )\n    provider: Optional[str] = Field(\n        description=\"Database provider\",\n        default=\"tidb\",\n    )\n    host: Optional[str] = Field(\n        description=\"Database host.\",\n        default=\"localhost\",\n    )\n    port: Optional[int] = Field(description=\"Database port.\", default=4000)\n    username: Optional[str] = Field(\n        description=\"The username to connect the database.\",\n        default=\"root\",\n    )\n    password: Optional[str] = Field(\n        description=\"The password to connect the database.\",\n        default=\"\",\n    )\n    database: str = Field(\n        description=\"Default name for the database\",\n        default=\"autoflow\",\n    )\n    enable_ssl: Optional[bool] = Field(\n        description=\"Enable SSL connection.\",\n        default=True,\n    )\n"
  },
  {
    "path": "core/autoflow/configs/knowledge_base.py",
    "content": "from enum import Enum\nfrom typing import List, Optional\n\nfrom pydantic import BaseModel\nfrom autoflow.configs.models.llms import LLMConfig\nfrom autoflow.configs.models.embeddings import EmbeddingModelConfig\nfrom autoflow.configs.models.rerankers import RerankerConfig\n\n# Index Methods\n\n\nclass IndexMethod(str, Enum):\n    VECTOR_SEARCH = \"VECTOR_SEARCH\"\n    FULLTEXT_SEARCH = \"FULLTEXT_SEARCH\"\n    KNOWLEDGE_GRAPH = \"KNOWLEDGE_GRAPH\"\n\n\nDEFAULT_INDEX_METHODS = [IndexMethod.VECTOR_SEARCH]\n\n# Knowledge Base Config\n\n\nclass Version(int, Enum):\n    V1 = 1\n\n\nclass KnowledgeBaseConfig(BaseModel):\n    version: int = Version.V1\n    name: str\n    description: Optional[str] = None\n    index_methods: List[IndexMethod] = DEFAULT_INDEX_METHODS\n    llm: LLMConfig = None\n    embedding_model: EmbeddingModelConfig = None\n    reranker: RerankerConfig = None\n"
  },
  {
    "path": "core/autoflow/configs/main.py",
    "content": "from pydantic import BaseModel\n\nfrom autoflow.configs.db import DatabaseConfig\n\n\nclass Config(BaseModel):\n    db: DatabaseConfig = DatabaseConfig()\n"
  },
  {
    "path": "core/autoflow/configs/models/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/configs/models/embeddings/__init__.py",
    "content": "from .base import EmbeddingModelConfig\n\n__all__ = [\n    \"EmbeddingModelConfig\",\n]\n"
  },
  {
    "path": "core/autoflow/configs/models/embeddings/base.py",
    "content": "from typing import Optional, Dict\n\nfrom pydantic import BaseModel, Field, model_validator\n\nfrom autoflow.configs.models.providers import ModelProviders\n\n\nclass EmbeddingModelConfig(BaseModel):\n    provider: ModelProviders = Field(\n        description=\"Provider of the embedding_models models (e.g., 'openai')\",\n        default=ModelProviders.OPENAI,\n    )\n    config: Optional[Dict] = Field(\n        description=\"Configuration for the specific embedding_models model\",\n        default=None,\n    )\n\n    _provider_configs: Dict[str, str] = {\n        \"openai\": \"OpenAIEmbeddingConfig\",\n        \"jina_ai\": \"JinaAIEmbeddingConfig\",\n    }\n\n    @model_validator(mode=\"after\")\n    def validate_and_create_config(self) -> \"EmbeddingModelConfig\":\n        provider = self.provider.value\n        config = self.config\n\n        if provider not in self._provider_configs:\n            raise ValueError(f\"Unsupported embedding_models provider: {provider}\")\n\n        module = __import__(\n            f\"autoflow.configs.models.embeddings.{provider}\",\n            fromlist=[self._provider_configs[provider]],\n        )\n        config_class = getattr(module, self._provider_configs[provider])\n\n        if config is None:\n            config = {}\n\n        if not isinstance(config, dict):\n            if not isinstance(config, config_class):\n                raise ValueError(\n                    f\"Invalid config type for embedding_models provider: {provider}\"\n                )\n            return self\n\n        self.config = config_class(**config)\n        return self\n"
  },
  {
    "path": "core/autoflow/configs/models/embeddings/common.py",
    "content": "from typing import Optional\n\nfrom pydantic import BaseModel, Field\n\n\nclass BaseEmbeddingConfig(BaseModel):\n    model: str = Field(\n        description=\"The model to use for the embedding\",\n        default=\"text-embedding-3-small\",\n    )\n    max_tokens: Optional[int] = None\n    temperature: float = 0.1\n"
  },
  {
    "path": "core/autoflow/configs/models/embeddings/jina_ai.py",
    "content": "from autoflow.configs.models.embeddings.common import BaseEmbeddingConfig\n\n\nclass JinaAIEmbeddingConfig(BaseEmbeddingConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/configs/models/embeddings/openai.py",
    "content": "from autoflow.configs.models.embeddings.common import BaseEmbeddingConfig\n\n\nclass OpenAIEmbeddingConfig(BaseEmbeddingConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/configs/models/llms/__init__.py",
    "content": "from .base import LLMConfig\n\n__all__ = [\"LLMConfig\"]\n"
  },
  {
    "path": "core/autoflow/configs/models/llms/base.py",
    "content": "from typing import Optional, Dict\n\nfrom pydantic import BaseModel, Field, model_validator\n\nfrom autoflow.configs.models.providers import ModelProviders\n\nDEFAULT_TEMPERATURE = 0.1\n\n\nclass LLMConfig(BaseModel):\n    provider: ModelProviders = Field(\n        description=\"Provider of the large language models (LLM) (e.g., 'openai')\",\n        default=ModelProviders.OPENAI,\n    )\n    config: Optional[Dict] = Field(\n        description=\"Configuration for the specific database\",\n        default=None,\n    )\n    _llm_configs: Dict[str, str] = {\n        \"openai\": \"OpenAILLMConfig\",\n        \"custom\": \"CustomLLMConfig\",\n    }\n\n    @model_validator(mode=\"after\")\n    def validate_and_create_config(self) -> \"LLMConfig\":\n        provider = self.provider.value\n        config = self.config\n\n        if provider not in self._llm_configs:\n            raise ValueError(f\"Unsupported llm provider: {provider}\")\n\n        module = __import__(\n            f\"autoflow.configs.models.llms.{provider}\",\n            fromlist=[self._llm_configs[provider]],\n        )\n        config_class = getattr(module, self._llm_configs[provider])\n\n        if config is None:\n            config = {}\n\n        if not isinstance(config, dict):\n            if not isinstance(config, config_class):\n                raise ValueError(f\"Invalid config type for llm provider: {provider}\")\n            return self\n\n        self.config = config_class(**config)\n        return self\n"
  },
  {
    "path": "core/autoflow/configs/models/llms/common.py",
    "content": "from typing import Optional\n\nfrom pydantic import Field, BaseModel\n\n\nclass BaseLLMConfig(BaseModel):\n    model: str = Field(\n        description=\"The model to use for the LLM\",\n        default=\"gpt-4o\",\n    )\n    max_tokens: Optional[int] = None\n    temperature: float = 0.1\n"
  },
  {
    "path": "core/autoflow/configs/models/llms/openai.py",
    "content": "from autoflow.configs.models.llms.common import BaseLLMConfig\n\n\nclass OpenAILLMConfig(BaseLLMConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/configs/models/manager.py",
    "content": "from typing import Dict\n\nfrom pydantic import BaseModel, Field\n\nfrom autoflow.configs.models.providers import ProviderConfig\n\n\nclass ManagerConfig(BaseModel):\n    providers: Dict[str, ProviderConfig] = Field(default_factory=dict)\n"
  },
  {
    "path": "core/autoflow/configs/models/providers/__init__.py",
    "content": "from autoflow.configs.models.providers.base import (\n    ModelProviders,\n    ModelProviderInfo,\n    ProviderConfig,\n)\n\nmodel_providers = [\n    ModelProviderInfo(\n        name=ModelProviders.OPENAI,\n        display_name=\"OpenAI\",\n        description=\"The OpenAI API provides a simple interface for developers to create an intelligence layer in their applications, powered by OpenAI's state of the art models.\",\n        website=\"https://platform.openai.com\",\n        supported_model_types=[\"llm\", \"text_embedding\"],\n    )\n]\n\nmodel_provider_mappings = {provider.name: provider for provider in model_providers}\n\n__all__ = [\n    \"ModelProviders\",\n    \"ModelProviderInfo\",\n    \"ProviderConfig\",\n    \"model_providers\",\n    \"model_provider_mappings\",\n]\n"
  },
  {
    "path": "core/autoflow/configs/models/providers/base.py",
    "content": "from typing import Optional, Literal, List\n\nfrom pydantic import BaseModel, Field\nfrom litellm import LlmProviders\n\nModelProviders = LlmProviders\n\nModelType = Literal[\"llm\", \"text_embedding\", \"rerank\"]\n\n\nclass ModelProviderInfo(BaseModel):\n    name: ModelProviders = Field(\n        description=\"The name of the model provider.\",\n    )\n    logo: Optional[str] = Field(\n        description=\"The logo of the model provider\", default=None\n    )\n    display_name: str = Field(\n        description=\"The name of the model provider\",\n    )\n    description: str = Field(\n        description=\"The description of the model provider\", default=None\n    )\n    website: Optional[str] = Field(\n        description=\"The website of the model provider\", default=None\n    )\n    supported_model_types: List[ModelType] = Field(\n        description=\"The model types supported by the model provider\"\n    )\n\n\nclass ProviderConfig(BaseModel):\n    api_key: Optional[str] = Field(\n        title=\"API key\",\n        default=None,\n    )\n    api_base: Optional[str] = Field(\n        title=\"API Base\",\n        default=None,\n    )\n"
  },
  {
    "path": "core/autoflow/configs/models/providers/jinaai.py",
    "content": "from autoflow.configs.models.providers.base import ProviderConfig\n\n\nclass JinaAIConfig(ProviderConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/configs/models/providers/openai.py",
    "content": "from autoflow.configs.models.providers.base import ProviderConfig\n\n\nclass OpenAIConfig(ProviderConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/configs/models/rerankers/__init__.py",
    "content": "from .base import RerankerConfig\n\n__all__ = [\"RerankerConfig\"]\n"
  },
  {
    "path": "core/autoflow/configs/models/rerankers/base.py",
    "content": "from typing import Optional, Dict\n\nfrom pydantic import BaseModel, Field, model_validator\nfrom autoflow.configs.models.providers import ModelProviders\n\n\nclass RerankerConfig(BaseModel):\n    provider: ModelProviders = Field(\n        description=\"Provider of the reranker models (e.g., 'openai')\",\n        default=ModelProviders.OPENAI,\n    )\n    config: Optional[Dict] = Field(\n        description=\"Configuration for the specific reranker model\",\n        default=None,\n    )\n    _provider_configs: Dict[str, str] = {\n        \"jina_ai\": \"JinaAIRerankerConfig\",\n        \"custom\": \"CustomRerankerConfig\",\n    }\n\n    @model_validator(mode=\"after\")\n    def validate_and_create_config(self) -> \"RerankerConfig\":\n        provider = self.provider.value\n        config = self.config\n\n        if provider not in self._provider_configs:\n            raise ValueError(f\"Unsupported reranker provider: {provider}\")\n\n        module = __import__(\n            f\"autoflow.configs.models.rerankers.{provider}\",\n            fromlist=[self._provider_configs[provider]],\n        )\n        config_class = getattr(module, self._provider_configs[provider])\n\n        if config is None:\n            config = {}\n\n        if not isinstance(config, dict):\n            if not isinstance(config, config_class):\n                raise ValueError(\n                    f\"Invalid config type for reranker provider: {provider}\"\n                )\n            return self\n\n        self.config = config_class(**config)\n        return self\n"
  },
  {
    "path": "core/autoflow/configs/models/rerankers/common.py",
    "content": "from pydantic import BaseModel, Field\n\n\nclass BaseRerankerConfig(BaseModel):\n    model: str = Field(\n        description=\"The model to use for the reranker\",\n        default=\"jina-reranker-v2-base-multilingual\",\n    )\n    top_n: int = Field(\n        description=\"The number of results to return\",\n        default=5,\n    )\n"
  },
  {
    "path": "core/autoflow/configs/models/rerankers/jina_ai.py",
    "content": "from autoflow.configs.models.rerankers.common import BaseRerankerConfig\n\n\nclass JinaAIRerankerConfig(BaseRerankerConfig):\n    pass\n"
  },
  {
    "path": "core/autoflow/data_types.py",
    "content": "from enum import Enum\nimport os\nfrom typing import IO, Optional, Union, BinaryIO, TextIO\nfrom urllib.parse import urlparse\n\n\nclass DataType(str, Enum):\n    MARKDOWN = \"markdown\"\n    PDF = \"pdf\"\n    DOCX = \"docx\"\n    PPTX = \"pptx\"\n    XLSX = \"xlsx\"\n    CSV = \"csv\"\n    SITEMAP = \"sitemap\"\n    HTML = \"html\"\n\n\ndef guess_datatype(source: Union[str, IO, BinaryIO, TextIO]) -> Optional[DataType]:\n    if isinstance(source, str):\n        url = urlparse(source)\n        if url.scheme == \"\" or url.scheme == \"file\":\n            return guess_by_filename(url.path)\n        elif url.scheme == \"http\" or url.scheme == \"https\":\n            return DataType.HTML\n        else:\n            if os.path.exists(source):\n                return guess_by_filename(source)\n            raise ValueError(f\"Unsupported URL scheme: {url.scheme}\")\n    elif isinstance(source, IO):\n        return guess_by_filename(source.name)\n    else:\n        return None\n\n\ndef guess_by_filename(filename: str) -> Optional[DataType]:\n    \"\"\"Helper function to guess data type from filename.\"\"\"\n    lower = filename.lower()\n    if lower.endswith(\".md\"):\n        return DataType.MARKDOWN\n    elif lower.endswith(\".pdf\"):\n        return DataType.PDF\n    elif lower.endswith(\".docx\"):\n        return DataType.DOCX\n    elif lower.endswith(\".pptx\"):\n        return DataType.PPTX\n    elif lower.endswith(\".xlsx\"):\n        return DataType.XLSX\n    elif lower.endswith(\".csv\"):\n        return DataType.CSV\n    elif lower.endswith(\".xml\") and \"sitemap\" in lower:\n        return DataType.SITEMAP\n    elif lower.endswith((\".html\", \".htm\")):\n        return DataType.HTML\n    else:\n        return None\n"
  },
  {
    "path": "core/autoflow/db.py",
    "content": "import sqlalchemy\nfrom pytidb.utils import build_tidb_dsn\nfrom sqlalchemy import Engine\n\nfrom autoflow.configs.db import DatabaseConfig\n\n\ndef get_db_engine_from_config(db_config: DatabaseConfig) -> Engine:\n    if db_config.database_url is not None:\n        database_url = db_config.database_url\n    else:\n        database_url = str(\n            build_tidb_dsn(\n                host=db_config.host,\n                port=db_config.port,\n                username=db_config.username,\n                password=db_config.password,\n                database=db_config.database,\n                enable_ssl=db_config.enable_ssl,\n            )\n        )\n\n    # Notice:\n    # In order to save resource consumption, the tidb serverless cluster will \"pause\" automatically if there\n    # are no active connections for more than 5 minutes, it will close all connections on the server side,\n    # so we also need to recycle the connections from the connection pool on the client side.\n    db_engine = sqlalchemy.create_engine(\n        database_url,\n        pool_size=20,\n        max_overflow=40,\n        pool_recycle=300,\n        pool_pre_ping=True,\n    )\n\n    return db_engine\n"
  },
  {
    "path": "core/autoflow/knowledge_base/__init__.py",
    "content": "from .base import KnowledgeBase\n\n__all__ = [\n    \"KnowledgeBase\",\n]\n"
  },
  {
    "path": "core/autoflow/knowledge_base/base.py",
    "content": "import logging\nimport uuid\nfrom typing import List, Optional, Any\nfrom functools import partial\nfrom os import cpu_count\nfrom concurrent.futures import ThreadPoolExecutor\n\nfrom pydantic import Field, PrivateAttr\nfrom sqlalchemy import Engine\nfrom llama_index.core.base.llms.types import ChatResponse\n\nfrom autoflow.chunkers.base import Chunker\nfrom autoflow.chunkers.helper import get_chunker_for_datatype\nfrom autoflow.configs.knowledge_base import IndexMethod\nfrom autoflow.data_types import DataType, guess_datatype\nfrom autoflow.knowledge_base.prompts import QA_WITH_KNOWLEDGE_PROMPT_TEMPLATE\nfrom autoflow.knowledge_graph.index import KnowledgeGraphIndex\nfrom autoflow.loaders.base import Loader\nfrom autoflow.loaders.helper import get_loader_for_datatype\nfrom autoflow.models.llms import LLM\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.models.llms.dspy import get_dspy_lm_by_llm\nfrom autoflow.models.rerank_models import RerankModel\nfrom autoflow.types import BaseComponent, SearchMode\nfrom autoflow.storage.doc_store import DocumentSearchResult, Document\n\nlogger = logging.getLogger(__name__)\n\n\nclass KnowledgeBase(BaseComponent):\n    _llm: LLM = PrivateAttr()\n    namespace: Optional[str] = Field(default=None)\n    name: Optional[str] = Field(default=None)\n    description: Optional[str] = Field(default=None)\n    index_methods: List[IndexMethod] = Field(default=[IndexMethod.VECTOR_SEARCH])\n\n    def __init__(\n        self,\n        db_engine: Engine = None,\n        namespace: Optional[str] = None,\n        name: Optional[str] = None,\n        description: Optional[str] = None,\n        index_methods: Optional[List[IndexMethod]] = None,\n        llm: Optional[LLM] = None,\n        embedding_model: Optional[EmbeddingModel] = None,\n        rerank_model: Optional[RerankModel] = None,\n        max_workers: Optional[int] = None,\n    ):\n        super().__init__(\n            namespace=namespace,\n    
        name=name,\n            description=description,\n            index_methods=index_methods,\n        )\n        self._db_engine = db_engine\n        self._llm = llm\n        self._embedding_model = embedding_model\n        self._reranker_model = rerank_model\n        self._init_stores()\n        self._init_indexes()\n        self._max_workers = max_workers or cpu_count()\n\n    def _init_stores(self):\n        from autoflow.storage.doc_store.tidb_doc_store import TiDBDocumentStore\n        from autoflow.storage.graph_store.tidb_graph_store import TiDBGraphStore\n        from pytidb import TiDBClient\n\n        self._tidb_client = TiDBClient(self._db_engine)\n        self._doc_store = TiDBDocumentStore(\n            client=self._tidb_client,\n            embedding_model=self._embedding_model,\n            namespace=self.namespace,\n        )\n        self._kg_store = TiDBGraphStore(\n            client=self._tidb_client,\n            embedding_model=self._embedding_model,\n            namespace=self.namespace,\n        )\n\n    def _init_indexes(self):\n        self._dspy_lm = get_dspy_lm_by_llm(self._llm)\n        self._kg_index = KnowledgeGraphIndex(\n            kg_store=self._kg_store,\n            dspy_lm=self._dspy_lm,\n            embedding_model=self._embedding_model,\n        )\n\n    def class_name(self):\n        return \"KnowledgeBase\"\n\n    def documents(self):\n        return self._doc_store\n\n    def knowledge_graph(self):\n        return self._kg_store\n\n    def add(\n        self,\n        source: str | list[str],\n        data_type: Optional[DataType] = None,\n        loader: Optional[Loader] = None,\n        chunker: Optional[Chunker] = None,\n    ) -> List[Document]:\n        if data_type is None:\n            data_type = guess_datatype(source)\n        if data_type is None:\n            raise ValueError(\"Please provide a valid data type.\")\n\n        if loader is None:\n            loader = get_loader_for_datatype(data_type)\n\n     
   with ThreadPoolExecutor(max_workers=self._max_workers) as executor:\n            build_index_for_document = partial(\n                self.build_index_for_document, chunker=chunker\n            )\n\n            results = executor.map(build_index_for_document, loader.load(source))\n\n        return_documents = []\n        for result in results:\n            return_documents.append(result)\n        return return_documents\n\n    def build_index_for_document(\n        self,\n        document: Document,\n        chunker: Optional[Chunker] = None,\n    ) -> List[Document]:\n        \"\"\"\n        Build index for a document.\n\n        Args:\n            document: The document to build index for.\n            chunker: The chunker to use to chunk the document.\n\n        Returns:\n            A list of documents that are the result of indexing the original document.\n        \"\"\"\n        # TODO: handle duplicate documents.\n        if chunker is None:\n            chunker = get_chunker_for_datatype(document.data_type)\n\n        chunked_document = chunker.chunk(document)\n        self.add_document(chunked_document)\n\n        if IndexMethod.KNOWLEDGE_GRAPH in self.index_methods:\n\n            def add_chunk_to_kg(chunk):\n                logger.info(\"Adding chunk <id: %s> to knowledge graph.\", chunk.id)\n                self._kg_index.add_chunk(chunk)\n\n            with ThreadPoolExecutor(max_workers=self._max_workers) as executor:\n                list(executor.map(add_chunk_to_kg, chunked_document.chunks))\n\n        return chunked_document\n\n    # Document management.\n\n    def add_document(self, document: Document):\n        self._doc_store.add([document])\n\n    def add_documents(self, documents: List[Document]):\n        return self._doc_store.add(documents)\n\n    def list_documents(self) -> List[Document]:\n        return self._doc_store.list()\n\n    def get_document(self, doc_id: uuid.UUID) -> Document:\n        return self._doc_store.get(doc_id)\n\n 
   def delete_document(self, doc_id: uuid.UUID) -> None:\n        return self._doc_store.delete(doc_id)\n\n    # Search\n\n    def search(self):\n        # TODO: Support one interface search documents and knowledge graph at the same time.\n        raise NotImplementedError()\n\n    def search_documents(\n        self,\n        query: str,\n        mode: SearchMode = \"vector\",\n        similarity_threshold: Optional[float] = None,\n        num_candidate: Optional[int] = None,\n        top_k: Optional[int] = 5,\n        **kwargs: Any,\n    ) -> DocumentSearchResult:\n        return self._doc_store.search(\n            query=query,\n            mode=mode,\n            similarity_threshold=similarity_threshold,\n            num_candidate=num_candidate,\n            top_k=top_k,\n            **kwargs,\n        )\n\n    def search_knowledge_graph(\n        self,\n        query: str,\n        depth: int = 2,\n        metadata_filters: Optional[dict] = None,\n        **kwargs,\n    ):\n        return self._kg_index.retrieve(\n            query=query,\n            depth=depth,\n            metadata_filters=metadata_filters,\n            **kwargs,\n        )\n\n    # Generation.\n\n    def ask(self, question: str) -> ChatResponse:\n        result = self.search_documents(\n            query=question,\n            similarity_threshold=0.4,\n            top_k=5,\n        )\n        chunks = result.chunks\n        knowledge_graph = self.search_knowledge_graph(\n            query=question,\n        )\n        messages = QA_WITH_KNOWLEDGE_PROMPT_TEMPLATE.format_messages(\n            llm=self._llm,\n            query_str=question,\n            chunks=chunks,\n            knowledge_graph=knowledge_graph,\n        )\n        return self._llm.chat(messages)\n\n    # Knowledge Base Operation.\n\n    def reset(self):\n        self._doc_store.reset()\n        self._kg_store.reset()\n"
  },
  {
    "path": "core/autoflow/knowledge_base/prompts.py",
    "content": "from llama_index.core.prompts.rich import RichPromptTemplate\n\nQA_WITH_KNOWLEDGE_PROMPT_TEMPLATE = RichPromptTemplate(\n    template_str=\"\"\"\n    {% chat role=\"system\" %}\n    We have provided context information below.\n    ---------------------\n    {% if knowledge_graph %}\n    <knowledge_graph>\n        <entities>\n            {% for entity in knowledge_graph.entities %}\n            <entity id=\"{{ entity.id }}\">\n            {{ entity.name }}: {{ entity.description }}\n            </entity>\n            {% endfor %}\n        </entities>\n        <relationships>\n            {% for relationship in knowledge_graph.relationships %}\n            <relationship id=\"{{ relationship.id }}\">\n            {{relationship.source_entity.name}} -> {{ relationship.description }} -> {{relationship.target_entity.name}}\n            </relationship>\n            {% endfor %}\n        </relationships>\n    </knowledge_graph>\n    {% endif %}\n\n    {% for chunk in chunks %}\n    <chunk id=\"{{ chunk.id }}\">\n    {{ chunk.text }}\n    </chunk>\n    {% endfor %}\n    ---------------------\n    Given this information, please give a comprehensive answer to the question in Markdown format:\n    {% endchat %}\n\n    {% chat role=\"user\" %}\n    {{ query_str }}\n    {% endchat %}\n    \"\"\"\n)\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/__init__.py",
    "content": "\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/extractors/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/knowledge_graph/extractors/base.py",
    "content": "from abc import abstractmethod\n\nfrom autoflow.types import BaseComponent\nfrom autoflow.knowledge_graph.types import GeneratedKnowledgeGraph\n\n\nclass KGExtractor(BaseComponent):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n    @abstractmethod\n    def extract(self, text: str) -> GeneratedKnowledgeGraph:\n        raise NotImplementedError()\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/extractors/simple.py",
    "content": "import dspy\n\nfrom autoflow.knowledge_graph.extractors.base import KGExtractor\nfrom autoflow.knowledge_graph.programs.extract_covariates import (\n    EntityCovariateExtractor,\n)\nfrom autoflow.knowledge_graph.programs.extract_graph import KnowledgeGraphExtractor\nfrom autoflow.knowledge_graph.types import GeneratedKnowledgeGraph\n\n\nclass SimpleKGExtractor(KGExtractor):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self._dspy_lm = dspy_lm\n        self._graph_extractor = KnowledgeGraphExtractor(dspy_lm)\n        self._entity_metadata_extractor = EntityCovariateExtractor(dspy_lm)\n\n    def extract(self, text: str) -> GeneratedKnowledgeGraph:\n        knowledge_graph = self._graph_extractor.forward(text)\n        knowledge_graph.entities = self._entity_metadata_extractor.forward(\n            text, knowledge_graph.entities\n        )\n        return knowledge_graph\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/index.py",
    "content": "import logging\nfrom typing import Optional\n\nimport dspy\n\nfrom autoflow.knowledge_graph.extractors.simple import SimpleKGExtractor\nfrom autoflow.knowledge_graph.retrievers.weighted import WeightedGraphRetriever\nfrom autoflow.knowledge_graph.types import (\n    RetrievedKnowledgeGraph,\n)\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.storage.doc_store.types import Chunk\nfrom autoflow.storage.graph_store.base import GraphStore\nfrom autoflow.storage.graph_store.types import KnowledgeGraph\nfrom autoflow.types import BaseComponent\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass KnowledgeGraphIndex(BaseComponent):\n    def __init__(\n        self,\n        kg_store: GraphStore,\n        dspy_lm: dspy.LM,\n        embedding_model: EmbeddingModel,\n    ):\n        super().__init__()\n        self._kg_store = kg_store\n        self._dspy_lm = dspy_lm\n        self._embedding_model = embedding_model\n        self._kg_extractor = SimpleKGExtractor(self._dspy_lm)\n\n    def add_text(self, text: str) -> Optional[KnowledgeGraph]:\n        knowledge_graph = self._kg_extractor.extract(text)\n        return self._kg_store.add(knowledge_graph.to_create())\n\n    def add_chunk(self, chunk: Chunk) -> Optional[KnowledgeGraph]:\n        # Check if the chunk has been added.\n        exists_relationships = self._kg_store.list_relationships(chunk_id=chunk.id)\n        if len(exists_relationships) > 0:\n            logger.warning(\n                \"The subgraph of chunk %s has already been added, skip.\", chunk.id\n            )\n            return None\n\n        logger.info(\"Extracting knowledge graph from chunk %s\", chunk.id)\n        knowledge_graph = self._kg_extractor.extract(chunk)\n        logger.info(\"Knowledge graph extracted from chunk %s\", chunk.id)\n\n        return self._kg_store.add(knowledge_graph.to_create())\n\n    def retrieve(\n        self,\n        query: str,\n        depth: int = 2,\n        
metadata_filters: Optional[dict] = None,\n        **kwargs,\n    ) -> RetrievedKnowledgeGraph:\n        retriever = WeightedGraphRetriever(\n            self._kg_store,\n            self._embedding_model,\n            **kwargs,\n        )\n        return retriever.retrieve(\n            query=query,\n            depth=depth,\n            metadata_filters=metadata_filters,\n        )\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/programs/__init__.py",
    "content": "from .extract_graph import ExtractKnowledgeGraph\nfrom .extract_covariates import ExtractEntityCovariate\n\n__all__ = [\"ExtractKnowledgeGraph\", \"ExtractEntityCovariate\"]\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/programs/eval_graph.py",
    "content": "import logging\n\nimport dspy\nfrom dspy import Predict\nfrom pydantic import BaseModel\n\nfrom autoflow.knowledge_graph.types import GeneratedKnowledgeGraph\n\n# Initialize logger\nlogger = logging.getLogger(__name__)\n\n\nclass EvaluateKnowledgeGraph(dspy.Signature):\n    \"\"\"\n    Evaluate the differences between two knowledge graphs and provide scores for each entity and relationship,\n    as well as a final score for the entire knowledge graph.\n\n    Steps:\n    1. Iterate over each entity in the expected knowledge graph\n    2. For each expected entity, find the most similar entity in the actual knowledge graph\n    3. Calculate the score (range from 0 to 1) for the entity based on the similarity\n    4. Iterate over each relationship in the expected knowledge graph\n    5. For each expected relationship, find the most similar relationship in the actual knowledge graph\n    6. Calculate the score (range from 0 to 1) for the relationship based on the similarity\n    7. 
Average all the scores of entities and relationships to get the final score\n\n    Please only respond in JSON format.\n    \"\"\"\n\n    actual_graph: GeneratedKnowledgeGraph = dspy.InputField(\n        desc=\"The actual knowledge graph extracted from the text\"\n    )\n    expected_graph: GeneratedKnowledgeGraph = dspy.InputField(\n        desc=\"The expected knowledge graph\"\n    )\n    score: float = dspy.OutputField(\n        desc=\"The final score of the actual knowledge graph\"\n    )\n\n\nclass KGEvaluationResult(BaseModel):\n    expected: GeneratedKnowledgeGraph\n    actual: GeneratedKnowledgeGraph\n    score: float\n\n\nclass KnowledgeGraphEvaluator(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.program = Predict(EvaluateKnowledgeGraph)\n\n    def forward(\n        self,\n        actual: GeneratedKnowledgeGraph,\n        expected: GeneratedKnowledgeGraph,\n    ) -> KGEvaluationResult:\n        # Evaluate the knowledge graph using the provided language model\n        with dspy.settings.context(lm=self.dspy_lm):\n            prediction = self.program(actual_graph=actual, expected_graph=expected)\n            return KGEvaluationResult(\n                actual=actual,\n                expected=expected,\n                score=prediction.score,\n            )\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/programs/extract_covariates.py",
    "content": "import logging\nfrom typing import List, Mapping, Any\n\nimport dspy\nfrom dspy import Predict\nfrom pydantic import BaseModel, Field\n\nfrom autoflow.knowledge_graph.types import GeneratedEntity\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass InputEntity(BaseModel):\n    \"\"\"List of entities extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = Field(description=\"Name of the entity\")\n    description: str = Field(description=\"Description of the entity\")\n\n\nclass OutputEntity(BaseModel):\n    \"\"\"List of entities extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = Field(description=\"Name of the entity\")\n    description: str = Field(description=\"Description of the entity\")\n    covariates: Mapping[str, Any] = Field(\n        description=(\n            \"The attributes (which is a comprehensive json TREE, the first field is always: 'topic') to claim the entity. \"\n        )\n    )\n\n\nclass ExtractEntityCovariate(dspy.Signature):\n    \"\"\"Please carefully review the provided text and entities list which are already identified in the text.\n    Focusing on identifying detailed covariates associated with each entities provided.\n\n    Extract and link the covariates (which is a comprehensive json TREE, the first field is always: \"topic\") to their respective entities.\n    Ensure all extracted covariates is clearly connected to the correct entity for accuracy and comprehensive understanding.\n    Ensure that all extracted covariates are factual and verifiable within the text itself, without relying on external knowledge or assumptions.\n    Collectively, the covariates should provide a thorough and precise summary of the entity's characteristics as described in the source material.\n\n    Please only response in JSON format.\n    \"\"\"\n\n    text = dspy.InputField(\n        desc=\"a paragraph of text to extract covariates to claim the entities.\"\n    )\n    input: 
List[InputEntity] = dspy.InputField(\n        desc=\"List of entities identified in the text.\"\n    )\n    output: List[OutputEntity] = dspy.OutputField(\n        desc=\"List of entities with their covariates.\"\n    )\n\n\nclass EntityCovariateExtractor(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.program = Predict(ExtractEntityCovariate)\n\n    def forward(\n        self, text: str, entities: List[GeneratedEntity]\n    ) -> List[GeneratedEntity]:\n        with dspy.settings.context(lm=self.dspy_lm):\n            input_entities = [\n                InputEntity(\n                    name=entity.name,\n                    description=entity.description,\n                )\n                for entity in entities\n            ]\n\n            predict = self.program(\n                text=text,\n                input=input_entities,\n            )\n\n            output_entity_map = {entity.name: entity for entity in predict.output}\n            for entity in entities:\n                if entity.name in output_entity_map:\n                    # Update the covariates in the metadata of the entity.\n                    entity.meta = output_entity_map[entity.name].covariates\n\n            return entities\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/programs/extract_graph.py",
    "content": "import logging\nfrom typing import List\n\nimport dspy\nfrom dspy import Predict\nfrom pydantic import BaseModel, Field\n\nfrom autoflow.knowledge_graph.types import (\n    GeneratedEntity,\n    GeneratedKnowledgeGraph,\n    GeneratedRelationship,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass PredictEntity(BaseModel):\n    \"\"\"Entity extracted from the text to form the knowledge graph\"\"\"\n\n    name: str = Field(\n        description=\"Name of the entity, it should be a clear and concise term\"\n    )\n    description: str = Field(\n        description=(\n            \"Description of the entity, it should be a complete and comprehensive sentence, not few words. \"\n            \"Sample description of entity 'TiDB in-place upgrade': \"\n            \"'Upgrade TiDB component binary files to achieve upgrade, generally use rolling upgrade method'\"\n        )\n    )\n\n\nclass PredictRelationship(BaseModel):\n    \"\"\"Relationship extracted from the text to form the knowledge graph\"\"\"\n\n    source_entity: str = Field(\n        description=\"Source entity name of the relationship, it should an existing entity in the Entity list\"\n    )\n    target_entity: str = Field(\n        description=\"Target entity name of the relationship, it should an existing entity in the Entity list\"\n    )\n    relationship_desc: str = Field(\n        description=(\n            \"Description of the relationship, it should be a complete and comprehensive sentence, not few words. 
\"\n            \"For example: 'TiDB will release a new LTS version every 6 months.'\"\n        )\n    )\n\n\nclass PredictKnowledgeGraph(BaseModel):\n    \"\"\"Graph representation of the knowledge for text.\"\"\"\n\n    entities: List[PredictEntity] = Field(\n        description=\"List of entities in the knowledge graph\"\n    )\n    relationships: List[PredictRelationship] = Field(\n        description=\"List of relationships in the knowledge graph\"\n    )\n\n    def to_pandas(self):\n        from pandas import DataFrame\n\n        return {\n            \"entities\": DataFrame(\n                [\n                    {\n                        \"name\": entity.name,\n                        \"description\": entity.description,\n                    }\n                    for entity in self.entities\n                ]\n            ),\n            \"relationships\": DataFrame(\n                [\n                    {\n                        \"source_entity\": relationship.source_entity,\n                        \"relationship_desc\": relationship.relationship_desc,\n                        \"target_entity\": relationship.target_entity,\n                    }\n                    for relationship in self.relationships\n                ]\n            ),\n        }\n\n\nclass ExtractKnowledgeGraph(dspy.Signature):\n    \"\"\"Carefully analyze the provided text from database documentation and community blogs to thoroughly identify all entities related to database technologies, including both general concepts and specific details.\n\n    Follow these Step-by-Step Analysis:\n\n    1. 
Extract Meaningful Entities:\n      - Identify all significant nouns, proper nouns, and technical terminologies that represent database-related concepts, objects, components, features, issues, key steps, execute order, user case, locations, versions, or any substantial entities.\n      - Ensure that you capture entities across different levels of detail, from high-level overviews to specific technical specifications, to create a comprehensive representation of the subject matter.\n      - Choose names for entities that are specific enough to indicate their meaning without additional context, avoiding overly generic terms.\n      - Consolidate similar entities to avoid redundancy, ensuring each represents a distinct concept at appropriate granularity levels.\n\n    2. Establish Relationships:\n      - Carefully examine the text to identify all relationships between clearly-related entities, ensuring each relationship is correctly captured with accurate details about the interactions.\n      - Analyze the context and interactions between the identified entities to determine how they are interconnected, focusing on actions, associations, dependencies, or similarities.\n      - Clearly define the relationships, ensuring accurate directionality that reflects the logical or functional dependencies among entities. 
\\\n         This means identifying which entity is the source, which is the target, and what the nature of their relationship is (e.g., $source_entity depends on $target_entity for $relationship).\n\n    Some key points to consider:\n      - Please endeavor to extract all meaningful entities and relationships from the text, avoid subsequent additional gleanings.\n\n    Objective: Produce a detailed and comprehensive knowledge graph that captures the full spectrum of entities mentioned in the text, along with their interrelations, reflecting both broad concepts and intricate details specific to the database domain.\n\n    Please only response in JSON format.\n    \"\"\"\n\n    text = dspy.InputField(\n        desc=\"a paragraph of text to extract entities and relationships to form a knowledge graph\"\n    )\n    knowledge: PredictKnowledgeGraph = dspy.OutputField(\n        desc=\"Graph representation of the knowledge extracted from the text.\"\n    )\n\n\nclass KnowledgeGraphExtractor(dspy.Module):\n    def __init__(self, dspy_lm: dspy.LM):\n        super().__init__()\n        self.dspy_lm = dspy_lm\n        self.program = Predict(ExtractKnowledgeGraph)\n\n    def forward(self, text: str) -> GeneratedKnowledgeGraph:\n        with dspy.settings.context(lm=self.dspy_lm):\n            prediction = self.program(text=text)\n            entities = [\n                GeneratedEntity(\n                    name=entity.name,\n                    description=entity.description,\n                    meta={},\n                )\n                for entity in prediction.knowledge.entities\n            ]\n            relationships = [\n                GeneratedRelationship(\n                    source_entity_name=relationship.source_entity,\n                    target_entity_name=relationship.target_entity,\n                    description=relationship.relationship_desc,\n                    meta={},\n                )\n                for relationship in 
prediction.knowledge.relationships\n            ]\n            return GeneratedKnowledgeGraph(\n                entities=entities,\n                relationships=relationships,\n            )\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/retrievers/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/knowledge_graph/retrievers/base.py",
    "content": "from abc import abstractmethod, ABC\n\nfrom autoflow.storage.graph_store.base import GraphStore\nfrom autoflow.storage.types import QueryBundle\nfrom autoflow.knowledge_graph.types import RetrievedKnowledgeGraph\n\n\nclass KGRetriever(ABC):\n    def __init__(self, knowledge_graph_store: GraphStore):\n        self._kg_store = knowledge_graph_store\n\n    @abstractmethod\n    def retrieve(\n        self,\n        query: QueryBundle,\n        depth: int = 2,\n        meta_filters: dict = None,\n    ) -> RetrievedKnowledgeGraph:\n        raise NotImplementedError\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/retrievers/simple.py",
    "content": ""
  },
  {
    "path": "core/autoflow/knowledge_graph/retrievers/weighted.py",
    "content": "from collections import defaultdict\nfrom typing import List, Set, Tuple, Optional\n\nfrom autoflow.knowledge_graph.types import (\n    RetrievedKnowledgeGraph,\n    RetrievedRelationship,\n)\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.storage.graph_store import GraphStore\nfrom autoflow.knowledge_graph.retrievers.base import KGRetriever\nfrom autoflow.storage.graph_store.types import (\n    Entity,\n    EntityDegree,\n    EntityFilters,\n    Relationship,\n    EntityType,\n    RelationshipFilters,\n)\nfrom autoflow.storage.types import QueryBundle\n\n\n# The configuration for the weight coefficient\n# format: ((min_weight, max_weight), coefficient)\nDEFAULT_WEIGHT_COEFFICIENTS = [\n    ((0, 100), 0.01),\n    ((100, 1000), 0.001),\n    ((1000, 10000), 0.0001),\n    ((10000, float(\"inf\")), 0.00001),\n]\n\n# The configuration for the range search\n# format: ((min_distance, max_distance), search_ratio)\n# The sum of search ratio should be 1 except some case we want to search as many as possible relationships.\n# In this case, we set the search ratio to 1, and the other search ratio sum should be 1\nDEFAULT_RANGE_SEARCH_CONFIG = [\n    ((0.0, 0.25), 1),\n    ((0.25, 0.35), 0.7),\n    ((0.35, 0.45), 0.2),\n    ((0.45, 0.55), 0.1),\n]\n\nDEFAULT_DEGREE_COEFFICIENT = 0.001\n\n\nclass WeightedGraphRetriever(KGRetriever):\n    def __init__(\n        self,\n        kg_store: GraphStore,\n        embedding_model: EmbeddingModel,\n        with_degree: bool = False,\n        alpha: float = 1,\n        weight_coefficients: List[Tuple[float, float]] = None,\n        search_range_config: List[Tuple[Tuple[float, float], float]] = None,\n        degree_coefficient: float = DEFAULT_DEGREE_COEFFICIENT,\n        fetch_synopsis_entities_num: int = 2,\n        max_neighbors: int = 10,\n    ):\n        super().__init__(kg_store)\n        self._embedding_model = embedding_model\n        self.with_degree = with_degree\n        self.alpha = 
alpha\n        self.weight_coefficients = weight_coefficients or DEFAULT_WEIGHT_COEFFICIENTS\n        self.search_range_config = search_range_config or DEFAULT_RANGE_SEARCH_CONFIG\n        self.degree_coefficient = degree_coefficient\n        self.fetch_synopsis_entities_num = fetch_synopsis_entities_num\n        self.max_neighbors = max_neighbors\n\n    def retrieve(\n        self,\n        query: str,\n        depth: int = 2,\n        metadata_filters: Optional[dict] = None,\n    ) -> RetrievedKnowledgeGraph:\n        query_embedding = self._embedding_model.get_query_embedding(query)\n\n        visited_relationships = set()\n        visited_entities = set()\n\n        new_relationships = self._weighted_search_relationships(\n            query_embedding=query_embedding,\n            visited_relationships=visited_relationships,\n            visited_entities=visited_entities,\n            metadata_filters=metadata_filters,\n        )\n\n        if len(new_relationships) == 0:\n            return RetrievedKnowledgeGraph(\n                entities=[],\n                relationships=[],\n            )\n\n        for rel, score in new_relationships:\n            visited_relationships.add(\n                RetrievedRelationship(\n                    **rel.model_dump(),\n                    similarity_score=score,\n                    score=score,\n                )\n            )\n            visited_entities.add(rel.source_entity)\n            visited_entities.add(rel.target_entity)\n\n        for _ in range(depth - 1):\n            actual_number = 0\n            progress = 0\n            for search_config in DEFAULT_RANGE_SEARCH_CONFIG:\n                search_ratio = search_config[1]\n                search_distance_range = search_config[0]\n                remaining_number = self.max_neighbors - actual_number\n                # calculate the expected number based search progress\n                # It's an accumulative search, so the expected number should be the 
difference between the expected number and the actual number\n                expected_number = (\n                    int((search_ratio + progress) * self.max_neighbors - actual_number)\n                    if progress * self.max_neighbors > actual_number\n                    else int(search_ratio * self.max_neighbors)\n                )\n                if expected_number > remaining_number:\n                    expected_number = remaining_number\n                if remaining_number <= 0:\n                    break\n\n                new_relationships = self._weighted_search_relationships(\n                    query_embedding=query_embedding,\n                    visited_relationships=visited_relationships,\n                    visited_entities=visited_entities,\n                    search_distance_range=search_distance_range,\n                    top_k=expected_number,\n                    metadata_filters=metadata_filters,\n                )\n\n                for rel, score in new_relationships:\n                    visited_relationships.add(\n                        RetrievedRelationship(\n                            **rel.model_dump(),\n                            similarity_score=score,\n                            score=score,\n                        )\n                    )\n                    visited_entities.add(rel.source_entity)\n                    visited_entities.add(rel.target_entity)\n\n                actual_number += len(new_relationships)\n                # search_ratio == 1 won't count the progress\n                if search_ratio != 1:\n                    progress += search_ratio\n\n        # Fetch related synopsis entities.\n        synopsis_entities = self._kg_store.search_entities(\n            query=QueryBundle(query_embedding=query_embedding),\n            top_k=self.fetch_synopsis_entities_num,\n            filters=EntityFilters(\n                entity_type=EntityType.synopsis,\n            ),\n        )\n        if 
len(synopsis_entities) > 0:\n            visited_entities.update(synopsis_entities)\n\n        # Rerank final relationships.\n        return_relationships = list(visited_relationships)\n        return_relationships.sort(key=lambda x: x.score, reverse=True)\n        self._fill_entity(return_relationships)\n\n        return_entities = [Entity(**e.model_dump()) for e in visited_entities]\n\n        return RetrievedKnowledgeGraph(\n            entities=return_entities,\n            relationships=return_relationships,\n        )\n\n    def _fill_entity(self, relationships: List[RetrievedRelationship]):\n        # FIXME: pytidb should return the relationship field: target_entity, source_entity.\n        entity_ids = [item.target_entity_id for item in relationships]\n        entity_ids.extend([item.source_entity_id for item in relationships])\n        entities = self._kg_store.list_entities(\n            filters=EntityFilters(entity_id=entity_ids)\n        )\n        entity_map = {entity.id: entity for entity in entities}\n        for rel in relationships:\n            rel.target_entity = Entity(**entity_map[rel.target_entity_id].model_dump())\n            rel.source_entity = Entity(**entity_map[rel.source_entity_id].model_dump())\n\n    def _weighted_search_relationships(\n        self,\n        query_embedding: List[float],\n        visited_relationships: Set[RetrievedRelationship],\n        visited_entities: Set[Entity],\n        search_distance_range: Tuple[float, float] = (0, 1),\n        top_k: int = 10,\n        metadata_filters: Optional[dict] = None,\n    ) -> List[RetrievedRelationship]:\n        visited_entity_ids = [e.id for e in visited_entities]\n        visited_relationship_ids = [r.id for r in visited_relationships]\n        relationships_with_score = self._kg_store.search_relationships(\n            query=QueryBundle(query_embedding=query_embedding),\n            filters=RelationshipFilters(\n                source_entity_id=visited_entity_ids,\n          
      exclude_relationship_ids=visited_relationship_ids,\n                metadata=metadata_filters,\n            ),\n            distance_range=search_distance_range,\n            top_k=top_k,\n        )\n\n        return self._rank_relationships(\n            relationships_with_score=relationships_with_score,\n            top_k=top_k,\n        )\n\n    def _rank_relationships(\n        self,\n        relationships_with_score: List[Tuple[Relationship, float]],\n        top_k: int = 10,\n    ) -> List[Tuple[Relationship, float]]:\n        \"\"\"\n        Rerank the relationship based on distance and weight\n        \"\"\"\n        # TODO: the degree can br pre-calc and stored in the database in advanced.\n        if self.with_degree:\n            entity_ids = set()\n            for r, _ in relationships_with_score:\n                entity_ids.add(r.source_entity_id)\n                entity_ids.add(r.target_entity_id)\n            entity_degrees = self._kg_store.bulk_calc_entities_degrees(entity_ids)\n        else:\n            entity_degrees = defaultdict(EntityDegree)\n\n        reranked_relationships = []\n        for r, similarity_score in relationships_with_score:\n            embedding_distance = 1 - similarity_score\n            source_in_degree = entity_degrees[r.source_entity_id].in_degree\n            target_out_degree = entity_degrees[r.target_entity_id].out_degree\n            final_score = self._calc_relationship_weighted_score(\n                embedding_distance,\n                r.weight,\n                source_in_degree,\n                target_out_degree,\n            )\n            reranked_relationships.append((r, final_score))\n\n        # Rerank relationships based on the calculated score.\n        reranked_relationships.sort(key=lambda x: x[1], reverse=True)\n        return reranked_relationships[:top_k]\n\n    def _calc_relationship_weighted_score(\n        self,\n        embedding_distance: float,\n        weight: int = 0,\n        
in_degree: int = 0,\n        out_degree: int = 0,\n    ) -> float:\n        weighted_score = self._calc_weight_score(weight)\n        degree_score = 0\n        if self.with_degree:\n            degree_score = self._calc_degree_score(in_degree, out_degree)\n        return self.alpha * (1 / embedding_distance) + weighted_score + degree_score\n\n    def _calc_weight_score(self, weight: float) -> float:\n        weight_score = 0.0\n        remaining_weight = weight\n\n        for weight_range, coefficient in self.weight_coefficients:\n            if remaining_weight <= 0:\n                break\n            lower_bound, upper_bound = weight_range\n            applicable_weight = min(upper_bound - lower_bound, remaining_weight)\n            weight_score += applicable_weight * coefficient\n            remaining_weight -= applicable_weight\n\n        return weight_score\n\n    def _calc_degree_score(self, in_degree: int, out_degree: int) -> float:\n        return (in_degree - out_degree) * self.degree_coefficient\n"
  },
  {
    "path": "core/autoflow/knowledge_graph/types.py",
    "content": "from typing import Any, Dict, List, Optional\nfrom uuid import UUID\n\nfrom pydantic import BaseModel, Field\nfrom autoflow.storage.graph_store.types import (\n    Entity,\n    EntityCreate,\n    KnowledgeGraphCreate,\n    Relationship,\n    RelationshipCreate,\n)\n\n\n# Generated Knowledge Graph\n\n\nclass GeneratedEntity(BaseModel):\n    name: str\n    description: str\n    meta: Dict[str, Any] = Field(default_factory=dict)\n\n\nclass GeneratedRelationship(BaseModel):\n    source_entity_name: str\n    target_entity_name: str\n    description: str\n    meta: Dict[str, Any] = Field(default_factory=dict)\n\n\nclass GeneratedKnowledgeGraph(BaseModel):\n    entities: List[GeneratedEntity]\n    relationships: List[GeneratedRelationship]\n\n    def to_create(\n        self,\n        chunk_id: Optional[UUID] = None,\n        document_id: Optional[UUID] = None,\n    ) -> KnowledgeGraphCreate:\n        return KnowledgeGraphCreate(\n            entities=[\n                EntityCreate(\n                    name=e.name,\n                    description=e.description,\n                    meta=e.meta,\n                )\n                for e in self.entities\n            ],\n            relationships=[\n                RelationshipCreate(\n                    source_entity_name=r.source_entity_name,\n                    target_entity_name=r.target_entity_name,\n                    description=r.description,\n                    meta=r.meta,\n                    weight=0,\n                    chunk_id=chunk_id,\n                    document_id=document_id,\n                )\n                for r in self.relationships\n            ],\n        )\n\n\n# Retrieved Knowledge Graph\n\n\nclass RetrievedEntity(Entity):\n    similarity_score: Optional[float] = Field(default=None)\n    score: Optional[float] = Field(default=None)\n\n    def __hash__(self):\n        return hash(self.id)\n\n    def __eq__(self, other: \"RetrievedEntity\"):\n        return self.id == 
other.id\n\n\nclass RetrievedRelationship(Relationship):\n    similarity_score: Optional[float] = Field(default=None)\n    score: Optional[float] = Field(default=None)\n\n    def __hash__(self):\n        return hash(self.id)\n\n    def __eq__(self, other: \"RetrievedRelationship\"):\n        return self.id == other.id\n\n\nclass RetrievedKnowledgeGraph(BaseModel):\n    query: Optional[str] = Field(\n        description=\"The query used to retrieve the knowledge graph\",\n        default=None,\n    )\n    entities: List[Entity] = Field(\n        description=\"List of entities in the knowledge graph\", default_factory=list\n    )\n    relationships: List[RetrievedRelationship] = Field(\n        description=\"List of relationships in the knowledge graph\", default_factory=list\n    )\n"
  },
  {
    "path": "core/autoflow/loaders/__init__.py",
    "content": "from .base import Loader\n\n__all__ = [\n    \"Loader\",\n]\n"
  },
  {
    "path": "core/autoflow/loaders/base.py",
    "content": "from abc import abstractmethod\nfrom typing import Generator\n\nfrom autoflow.types import BaseComponent\nfrom autoflow.storage.doc_store import Document\n\n\nclass Loader(BaseComponent):\n    @abstractmethod\n    def load(\n        self, source: str | list[str], **kwargs\n    ) -> Generator[Document, None, None]:\n        raise NotImplementedError\n\n\nclass FileLoader(Loader):\n    def load(self, files: str | list[str], **kwargs) -> Generator[Document, None, None]:\n        if isinstance(files, str):\n            files = [files]\n\n        for file in files:\n            yield self._load_file(file)\n\n    @abstractmethod\n    def _load_file(self, file: str) -> Document:\n        raise NotImplementedError\n"
  },
  {
    "path": "core/autoflow/loaders/helper.py",
    "content": "from autoflow.data_types import DataType\nfrom autoflow.loaders import Loader\n\n\ndef get_loader_for_datatype(datatype: DataType) -> Loader:\n    if datatype == DataType.MARKDOWN:\n        from autoflow.loaders.markdown import MarkdownLoader\n\n        return MarkdownLoader()\n    elif datatype == DataType.PDF:\n        from autoflow.loaders.pdf import PDFLoader\n\n        return PDFLoader()\n    elif datatype == DataType.HTML:\n        from autoflow.loaders.webpage import WebpageLoader\n\n        return WebpageLoader()\n    else:\n        raise ValueError(f\"Unsupported loader for data type: {datatype}\")\n"
  },
  {
    "path": "core/autoflow/loaders/markdown.py",
    "content": "from autoflow.data_types import DataType\nfrom autoflow.loaders.base import FileLoader\nfrom autoflow.storage.doc_store import Document\n\n\nclass MarkdownLoader(FileLoader):\n    def _load_file(self, file: str) -> Document:\n        with open(file, \"r\", encoding=\"utf-8\") as f:\n            content = f.read()\n\n        return Document(\n            name=file,\n            data_type=DataType.MARKDOWN,\n            content=content,\n        )\n"
  },
  {
    "path": "core/autoflow/loaders/pdf.py",
    "content": "from llama_index.readers import PDFReader\n\nfrom autoflow.data_types import DataType\nfrom autoflow.loaders.base import FileLoader\nfrom autoflow.storage.doc_store import Document\n\n\nclass PDFLoader(FileLoader):\n    def _load_file(self, file: str) -> Document:\n        reader = PDFReader()\n        documents = reader.load_data(file)\n        content = documents[0].text\n\n        return Document(\n            name=file.name,\n            data_type=DataType.PDF,\n            content=content,\n        )\n"
  },
  {
    "path": "core/autoflow/loaders/webpage.py",
    "content": "import logging\nfrom datetime import datetime, UTC\nfrom typing import Generator, Optional, List\nfrom playwright.sync_api import sync_playwright\nfrom bs4 import BeautifulSoup\nfrom markdownify import MarkdownConverter\n\nfrom autoflow.loaders.base import Loader\nfrom autoflow.storage.doc_store import Document\nfrom autoflow.data_types import DataType\n\nlogger = logging.getLogger(__name__)\n\n# Common tags and classes to ignore when processing web content\nIGNORE_TAGS = [\"nav\", \"header\", \"footer\", \"script\", \"style\", \"noscript\", \"iframe\"]\nIGNORE_CLASSES = [\n    \"nav\",\n    \"navigation\",\n    \"footer\",\n    \"header\",\n    \"sidebar\",\n    \"menu\",\n    \"ad\",\n    \"advertisement\",\n]\n\n\nclass WebpageLoader(Loader):\n    def __init__(\n        self,\n        ignore_tags: Optional[List[str]] = None,\n        ignore_classes: Optional[List[str]] = None,\n    ):\n        super().__init__()\n        self._ignore_tags = ignore_tags or IGNORE_TAGS\n        self._ignore_classes = ignore_classes or IGNORE_CLASSES\n\n    def load(self, urls: str | list[str], **kwargs) -> Generator[Document, None, None]:\n        if isinstance(urls, str):\n            urls = [urls]\n\n        visited = set()\n        with sync_playwright() as p:\n            browser = p.chromium.launch(headless=True)\n            try:\n                for url in urls:\n                    try:\n                        page = browser.new_page()\n                        response = page.goto(url)\n                        final_url = page.url\n\n                        if final_url in visited:\n                            continue\n\n                        if response is None or response.status >= 400:\n                            logger.error(\n                                f\"Failed to load page: {url}, response status: {response.status if response else 'None'}, skipping\"\n                            )\n                            continue\n\n                    
    # Parse the content\n                        soup = BeautifulSoup(page.content(), \"html.parser\")\n\n                        # Remove unwanted elements\n                        for tag in self._ignore_tags:\n                            for element in soup.find_all(tag):\n                                element.extract()\n\n                        for class_name in self._ignore_classes:\n                            for element in soup.find_all(class_=class_name):\n                                element.extract()\n\n                        # Convert to markdown\n                        content = MarkdownConverter().convert_soup(soup)\n                        title = page.title() or final_url\n\n                        visited.add(final_url)\n\n                        # Create document\n                        document = Document(\n                            name=title,\n                            content=content,\n                            data_type=DataType.HTML,\n                            meta={\n                                \"source_uri\": final_url,\n                                \"original_uri\": url,\n                                \"last_modified\": datetime.now(UTC).isoformat(),\n                            },\n                        )\n\n                        yield document\n\n                    except Exception as e:\n                        logger.error(f\"Error processing URL {url}: {str(e)}\")\n                        continue\n                    finally:\n                        if \"page\" in locals():\n                            page.close()\n            finally:\n                browser.close()\n"
  },
  {
    "path": "core/autoflow/main.py",
    "content": "from typing import List, Optional\nfrom sqlalchemy.engine import Engine\n\nfrom autoflow.configs.db import DatabaseConfig\nfrom autoflow.configs.knowledge_base import IndexMethod\nfrom autoflow.configs.main import Config\nfrom autoflow.db import get_db_engine_from_config\nfrom autoflow.knowledge_base.base import KnowledgeBase\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.models.llms import LLM\nfrom autoflow.models.manager import ModelManager, model_manager as default_model_manager\nfrom autoflow.models.rerank_models import RerankModel\n\n\nclass Autoflow:\n    _db_engine = None\n\n    def __init__(\n        self,\n        db_engine: Engine,\n        model_manager: Optional[ModelManager] = None,\n    ):\n        self._db_engine = db_engine\n        self._model_manager = model_manager or default_model_manager\n\n    @classmethod\n    def from_config(cls, config: Config) -> \"Autoflow\":\n        db_engine = cls._init_db_engine(config.db)\n        model_manager = ModelManager()\n        return cls(db_engine=db_engine, model_manager=model_manager)\n\n    @classmethod\n    def _init_db_engine(cls, db_config: DatabaseConfig) -> Engine:\n        if db_config.provider != \"tidb\":\n            raise NotImplementedError(\n                f\"Unsupported database provider: {db_config.provider}.\"\n            )\n        return get_db_engine_from_config(db_config)\n\n    @property\n    def db_engine(self) -> Engine:\n        return self._db_engine\n\n    @property\n    def llm_manager(self) -> \"ModelManager\":\n        return self._model_manager\n\n    def create_knowledge_base(\n        self,\n        name: str,\n        namespace: Optional[str] = None,\n        description: Optional[str] = None,\n        index_methods: Optional[List[IndexMethod]] = None,\n        llm: Optional[LLM] = None,\n        embedding_model: Optional[EmbeddingModel] = None,\n        rerank_model: Optional[RerankModel] = None,\n    ):\n        return 
KnowledgeBase(\n            db_engine=self.db_engine,\n            namespace=namespace,\n            name=name,\n            description=description,\n            index_methods=index_methods,\n            llm=llm,\n            embedding_model=embedding_model,\n            rerank_model=rerank_model,\n        )\n"
  },
  {
    "path": "core/autoflow/models/__init__.py",
    "content": "from .manager import (\n    ModelManager,\n    model_manager,\n)\n\n__all__ = [\n    \"ModelManager\",\n    \"model_manager\",\n]\n"
  },
  {
    "path": "core/autoflow/models/embedding_models/__init__.py",
    "content": "from .litellm import LiteLLMEmbedding\n\nEmbeddingModel = LiteLLMEmbedding\n\n__all__ = [\"EmbeddingModel\"]\n"
  },
  {
    "path": "core/autoflow/models/embedding_models/litellm.py",
    "content": "from typing import Any, List, Optional\nfrom llama_index.core.bridge.pydantic import Field\nfrom llama_index.core.embeddings import BaseEmbedding\n\n\ndef get_embeddings(\n    api_key: str,\n    api_base: str,\n    model_name: str,\n    input: List[str],\n    timeout: int = 60,\n    **kwargs: Any,\n) -> List[List[float]]:\n    \"\"\"\n    Retrieve embeddings for a given list of input strings using the specified model.\n\n    Args:\n        api_key (str): The API key for authentication.\n        api_base (str): The base URL of the LiteLLM proxy server.\n        model_name (str): The name of the model to use for generating embeddings.\n        input (List[str]): A list of input strings for which embeddings are to be generated.\n        timeout (float): The timeout value for the API call, default 60 secs.\n        **kwargs (Any): Additional keyword arguments to be passed to the embedding function.\n\n    Returns:\n        List[List[float]]: A list of embeddings, where each embedding corresponds to an input string.\n    \"\"\"\n    from litellm import embedding\n\n    response = embedding(\n        api_key=api_key,\n        api_base=api_base,\n        model=model_name,\n        input=input,\n        timeout=timeout,\n        **kwargs,\n    )\n    return [result[\"embedding\"] for result in response.data]\n\n\nclass LiteLLMEmbedding(BaseEmbedding):\n    model_name: str = Field(description=\"The name of the embedding model.\")\n    api_key: Optional[str] = Field(\n        default=None,\n        description=\"OpenAI key. If not provided, the proxy server must be configured with the key.\",\n    )\n    api_base: Optional[str] = Field(\n        default=None, description=\"The base URL of the LiteLLM proxy.\"\n    )\n    dimensions: Optional[int] = Field(\n        default=None,\n        description=(\n            \"The number of dimensions the resulting output embeddings should have. 
\"\n        ),\n    )\n    timeout: Optional[int] = Field(\n        default=60, description=\"Timeout for each request.\", ge=0\n    )\n\n    def __init__(\n        self, model_name: str, *, dimensions: Optional[int] = None, **kwargs\n    ) -> None:\n        super().__init__(model_name=model_name, dimensions=dimensions, **kwargs)\n        if dimensions is None:\n            self.dimensions = len(self._get_text_embedding(\"test\"))\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"lite-llm\"\n\n    async def _aget_query_embedding(self, query: str) -> List[float]:\n        return self._get_query_embedding(query)\n\n    async def _aget_text_embedding(self, text: str) -> List[float]:\n        return self._get_text_embedding(text)\n\n    def _get_query_embedding(self, query: str) -> List[float]:\n        embeddings = get_embeddings(\n            api_key=self.api_key,\n            api_base=self.api_base,\n            model_name=self.model_name,\n            dimensions=self.dimensions,\n            timeout=self.timeout,\n            input=[query],\n        )\n        return embeddings[0]\n\n    def _get_text_embedding(self, text: str) -> List[float]:\n        embeddings = get_embeddings(\n            api_key=self.api_key,\n            api_base=self.api_base,\n            model_name=self.model_name,\n            dimensions=self.dimensions,\n            timeout=self.timeout,\n            input=[text],\n        )\n        return embeddings[0]\n\n    def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:\n        return get_embeddings(\n            api_key=self.api_key,\n            api_base=self.api_base,\n            model_name=self.model_name,\n            dimensions=self.dimensions,\n            timeout=self.timeout,\n            input=texts,\n        )\n"
  },
  {
    "path": "core/autoflow/models/llms/__init__.py",
    "content": "from .litellm import LiteLLM\n\nLLM = LiteLLM\n\n__all__ = [\n    \"LLM\",\n]\n"
  },
  {
    "path": "core/autoflow/models/llms/dspy.py",
    "content": "import dspy\n\nfrom autoflow.models.llms import LLM\n\n\ndef get_dspy_lm_by_llm(llm: LLM) -> dspy.LM:\n    return dspy.LM(\n        model=llm.model,\n        max_tokens=llm.max_tokens,\n        **llm.additional_kwargs,\n    )\n"
  },
  {
    "path": "core/autoflow/models/llms/litellm.py",
    "content": "import logging\nfrom litellm import verbose_logger\nfrom llama_index.llms.litellm import LiteLLM\n\nverbose_logger.setLevel(logging.WARN)\n\nLiteLLM = LiteLLM\n"
  },
  {
    "path": "core/autoflow/models/manager.py",
    "content": "from typing import Dict, Optional\n\nfrom llama_index.core.base.embeddings.base import BaseEmbedding\nfrom llama_index.core.base.llms.base import BaseLLM\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\n\nfrom autoflow.configs.models.providers import ModelProviders\nfrom autoflow.configs.models.embeddings import EmbeddingModelConfig\nfrom autoflow.configs.models.llms import LLMConfig\nfrom autoflow.configs.models.providers.base import ProviderConfig\nfrom autoflow.configs.models.rerankers import RerankerConfig\n\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.models.llms import LLM\nfrom autoflow.models.rerank_models import RerankModel\n\n\nclass ModelManager:\n    _registry: Dict[ModelProviders, ProviderConfig] = {}\n\n    @classmethod\n    def load_from_db(cls):\n        pass\n\n    @classmethod\n    def from_config(cls):\n        pass\n\n    def registry_provider(self, name: ModelProviders, config: ProviderConfig):\n        self._registry[name] = config\n\n    def get_provider_config(self, name: ModelProviders) -> Optional[ProviderConfig]:\n        provider = self._registry.get(name)\n        if provider is None:\n            raise ValueError('Provider \"{}\" is not found.'.format(name))\n        return provider\n\n    def resolve_llm(\n        self,\n        provider: Optional[ModelProviders] = ModelProviders.OPENAI,\n        config: Optional[Dict] = None,\n    ) -> Optional[BaseLLM]:\n        cfg = LLMConfig.model_validate(\n            {\n                \"provider\": provider,\n                \"config\": config,\n            }\n        )\n        provider_config = self.get_provider_config(cfg.provider)\n        merged_config = {\n            **provider_config.model_dump(),\n            **cfg.config.model_dump(),\n            \"model\": f\"{cfg.provider.value}/{cfg.config.model}\",\n        }\n        return LLM(**merged_config)\n\n    def resolve_embedding_model(\n        self,\n        
provider: Optional[ModelProviders] = ModelProviders.OPENAI,\n        config: Optional[Dict] = None,\n    ) -> Optional[BaseEmbedding]:\n        cfg = EmbeddingModelConfig.model_validate(\n            {\n                \"provider\": provider,\n                \"config\": config,\n            }\n        )\n\n        provider_config = self.get_provider_config(cfg.provider)\n        merged_config = {\n            **provider_config.model_dump(),\n            **cfg.config.model_dump(exclude={\"model\"}),\n            \"model_name\": f\"{cfg.provider.value}/{cfg.config.model}\",\n        }\n        return EmbeddingModel(**merged_config)\n\n    def resolve_rerank_model(\n        self,\n        provider: Optional[ModelProviders] = ModelProviders.OPENAI,\n        config: Optional[Dict] = None,\n    ) -> Optional[BaseNodePostprocessor]:\n        cfg = RerankerConfig.model_validate(\n            {\n                \"provider\": provider,\n                \"config\": config,\n            }\n        )\n        provider_config = self.get_provider_config(cfg.provider)\n        merged_config = {\n            **provider_config.model_dump(),\n            **cfg.config.model_dump(exclude={\"model\"}),\n            \"model\": f\"{cfg.provider.value}/{cfg.config.model}\",\n        }\n        return RerankModel(**merged_config)\n\n\nmodel_manager: ModelManager = ModelManager()\n"
  },
  {
    "path": "core/autoflow/models/provider.py",
    "content": "from abc import ABC\n\n\nclass ProviderRegistry(ABC):\n    def register(self, name: str):\n        pass\n\n    def get_provider_credentials(self):\n        pass\n"
  },
  {
    "path": "core/autoflow/models/rerank_models/__init__.py",
    "content": "from .litellm import LiteLLMReranker\n\nRerankModel = LiteLLMReranker\n\n__all__ = [\"RerankModel\"]\n"
  },
  {
    "path": "core/autoflow/models/rerank_models/litellm.py",
    "content": "from typing import List, Optional\n\nfrom litellm.rerank_api.main import rerank\nfrom llama_index.core.bridge.pydantic import Field\nfrom llama_index.core.callbacks import CBEventType, EventPayload\nfrom llama_index.core.instrumentation import get_dispatcher\nfrom llama_index.core.instrumentation.events.rerank import (\n    ReRankEndEvent,\n    ReRankStartEvent,\n)\nfrom llama_index.core.postprocessor.types import BaseNodePostprocessor\nfrom llama_index.core.schema import NodeWithScore, QueryBundle, MetadataMode\n\ndispatcher = get_dispatcher(__name__)\n\n\nclass LiteLLMReranker(BaseNodePostprocessor):\n    model: str = Field(description=\"Reranker model name.\")\n    top_n: int = Field(description=\"Top N nodes to return.\")\n    api_base: Optional[str] = Field(description=\"Reranker API base url.\", default=None)\n    api_key: Optional[str] = Field(description=\"Reranker API key.\")\n\n    def __init__(\n        self,\n        top_n: int = 2,\n        model: str = \"rerank-english-v2.0\",\n        api_key: Optional[str] = None,\n        api_base: Optional[str] = None,\n    ):\n        super().__init__(top_n=top_n, model=model, api_base=api_base, api_key=api_key)\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"LiteLLMRerank\"\n\n    def _postprocess_nodes(\n        self,\n        nodes: List[NodeWithScore],\n        query_bundle: Optional[QueryBundle] = None,\n    ) -> List[NodeWithScore]:\n        dispatcher.event(\n            ReRankStartEvent(\n                query=query_bundle, nodes=nodes, top_n=self.top_n, model_name=self.model\n            )\n        )\n\n        if query_bundle is None:\n            raise ValueError(\"Missing query bundle in extra info.\")\n        if len(nodes) == 0:\n            return []\n\n        with self.callback_manager.event(\n            CBEventType.RERANKING,\n            payload={\n                EventPayload.NODES: nodes,\n                EventPayload.MODEL_NAME: self.model,\n    
            EventPayload.QUERY_STR: query_bundle.query_str,\n                EventPayload.TOP_K: self.top_n,\n            },\n        ) as event:\n            texts = [\n                node.node.get_content(metadata_mode=MetadataMode.EMBED)\n                for node in nodes\n            ]\n            results = rerank(\n                model=self.model,\n                query=query_bundle.query_str,\n                documents=texts,\n                top_n=self.top_n,\n                api_base=self.api_base,\n                api_key=self.api_key,\n            )\n\n            new_nodes = []\n            for result in results.results:\n                new_node_with_score = NodeWithScore(\n                    node=nodes[result[\"index\"]].node, score=result[\"relevance_score\"]\n                )\n                new_nodes.append(new_node_with_score)\n            event.on_end(payload={EventPayload.NODES: new_nodes})\n\n        dispatcher.event(ReRankEndEvent(nodes=new_nodes))\n        return new_nodes\n"
  },
  {
    "path": "core/autoflow/orms/__init__.py",
    "content": "from .base import UUIDBaseModel\n\n__all__ = [\n    \"UUIDBaseModel\",\n]\n"
  },
  {
    "path": "core/autoflow/orms/base.py",
    "content": "import uuid\nfrom datetime import datetime\nfrom typing import Optional\n\nfrom pytidb.schema import TableModel, Field\nfrom pytidb.datatype import DateTime\nfrom pytidb.sql import func\n\nfrom autoflow.utils import uuid6\n\n\nclass UUIDBaseModel(TableModel):\n    id: uuid.UUID = Field(default_factory=uuid6.uuid7, primary_key=True)\n    # Use sa_type instead of sa_column, refer to https://github.com/tiangolo/sqlmodel/discussions/743\n    created_at: Optional[datetime] = Field(\n        sa_type=DateTime(timezone=True),\n        sa_column_kwargs={\"server_default\": func.now()},\n    )\n    updated_at: Optional[datetime] = Field(\n        sa_type=DateTime(timezone=True),\n        sa_column_kwargs={\n            \"server_default\": func.now(),\n            \"onupdate\": func.now(),\n        },\n    )\n"
  },
  {
    "path": "core/autoflow/py.typed",
    "content": ""
  },
  {
    "path": "core/autoflow/storage/__init__.py",
    "content": ""
  },
  {
    "path": "core/autoflow/storage/doc_store/__init__.py",
    "content": "from .base import DocumentStore\nfrom .types import Document, DocumentSearchResult, Chunk\n\n__all__ = [\n    \"DocumentStore\",\n    \"DocumentSearchResult\",\n    \"Document\",\n    \"Chunk\",\n]\n"
  },
  {
    "path": "core/autoflow/storage/doc_store/base.py",
    "content": "from uuid import UUID\nfrom abc import ABC, abstractmethod\nfrom typing import Any, Dict, List, Optional\n\nfrom autoflow.storage.doc_store.types import Chunk, Document, DocumentSearchResult\n\n\nclass DocumentStore(ABC):\n    @abstractmethod\n    def add(self, documents: List[Document]) -> List[Document]:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def update(self, document_id: UUID, update: Dict[str, Any]):\n        raise NotImplementedError()\n\n    @abstractmethod\n    def delete(self, document_id: UUID) -> None:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def list(self, filters: Dict[str, Any] = None) -> List[Document]:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def search(\n        self,\n        query: str,\n        top_k: Optional[int] = None,\n        similarity_candidate: Optional[int] = None,\n    ) -> DocumentSearchResult:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def get(self, document_id: UUID) -> Document:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def add_doc_chunks(self, document_id: UUID, chunks: List[Chunk]) -> List[Chunk]:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def list_doc_chunks(self, document_id: UUID) -> List[Chunk]:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def get_chunk(self, chunk_id: UUID) -> Chunk:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def update_chunk(self, chunk_id: UUID, update: Dict[str, Any]) -> Chunk:\n        raise NotImplementedError()\n\n    @abstractmethod\n    def delete_chunk(self, chunk_id: UUID) -> None:\n        raise NotImplementedError()\n"
  },
  {
    "path": "core/autoflow/storage/doc_store/tidb_doc_store.py",
    "content": "import logging\nfrom uuid import UUID\nfrom typing import Any, Dict, List, Optional, Type\n\nfrom pydantic import PrivateAttr\n\nfrom pytidb import TiDBClient, Table\nfrom pytidb.embeddings import EmbeddingFunction\nfrom pytidb.schema import TableModel, Field, Column, Relationship as SQLRelationship\nfrom pytidb.datatype import Vector, JSON\nfrom pytidb.search import SearchType\nfrom sqlalchemy.dialects.mysql import LONGTEXT\n\nfrom autoflow.data_types import DataType\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.orms.base import UUIDBaseModel\nfrom autoflow.storage.doc_store.types import (\n    Document,\n    DocumentDescriptor,\n    Chunk,\n    RetrievedChunk,\n    DocumentSearchResult,\n)\nfrom autoflow.types import SearchMode\nfrom autoflow.storage.doc_store.base import DocumentStore\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef dynamic_create_models(\n    namespace: Optional[str] = None,\n    embedding_model: Optional[EmbeddingModel] = None,\n    vector_dims: Optional[int] = None,\n) -> tuple[Type[TableModel], Type[TableModel]]:\n    if embedding_model is None and vector_dims is None:\n        raise ValueError(\"Either `embedding_model` or `vector_dims` must be specified\")\n\n    # Determine the table names.\n    suffix = f\"_{namespace}\" if namespace else \"\"\n    document_table_name = f\"documents{suffix}\"\n    document_model_name = f\"DBDocument{suffix}\"\n    chunk_table_name = f\"chunks{suffix}\"\n    chunk_model_name = f\"DBChunk{suffix}\"\n\n    # Initialize the document table model.\n    class DBDocument(UUIDBaseModel):\n        hash: str = Field(max_length=128)\n        name: str = Field(max_length=256)\n        content: str = Field(sa_column=Column(LONGTEXT))\n        data_type: Optional[DataType] = Field(default=None)\n        meta: dict = Field(default_factory=dict, sa_column=Column(JSON))\n\n    document_model = type(\n        document_model_name,\n        (DBDocument,),\n        {\n           
 \"__tablename__\": document_table_name,\n            \"__table_args__\": {\"extend_existing\": True},\n        },\n        table=True,\n    )\n\n    # Initialize the chunk table model.\n    if embedding_model is not None:\n        embed_fn = EmbeddingFunction(\n            model_name=embedding_model.model_name,\n            dimensions=embedding_model.dimensions,\n            api_key=embedding_model.api_key,\n            api_base=embedding_model.api_base,\n            timeout=embedding_model.timeout,\n        )\n        vector_field = embed_fn.VectorField(source_field=\"text\")\n    else:\n        vector_field = Field(default=None, sa_column=Column(Vector(vector_dims)))\n\n    class DBChunk(UUIDBaseModel):\n        text: str = Field(sa_column=Column(LONGTEXT))\n        text_vec: Optional[Any] = vector_field\n        document_id: UUID = Field(foreign_key=f\"{document_table_name}.id\")\n\n    chunk_model = type(\n        chunk_model_name,\n        (DBChunk,),\n        {\n            \"__tablename__\": chunk_table_name,\n            \"__table_args__\": {\"extend_existing\": True},\n            \"__annotations__\": {\n                \"document\": Optional[document_model],\n            },\n            \"document\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"cascade\": \"all, delete\",\n                },\n            ),\n        },\n        table=True,\n    )\n\n    return document_model, chunk_model\n\n\nclass TiDBDocumentStore(DocumentStore):\n    _client: TiDBClient = PrivateAttr()\n    _document_db_model: Type[Type[TableModel]] = PrivateAttr()\n    _document_table: Table = PrivateAttr()\n    _chunk_db_model: Type[Type[TableModel]] = PrivateAttr()\n    _chunk_table: Table = PrivateAttr()\n\n    def __init__(\n        self,\n        client: TiDBClient,\n        namespace: Optional[str] = None,\n        embedding_model: Optional[EmbeddingModel] = None,\n        vector_dims: Optional[int] = None,\n    ) -> None:\n        
super().__init__()\n        self._client = client\n        self._db_engine = self._client.db_engine\n        self._embedding_model = embedding_model\n        self._init_store(namespace, vector_dims)\n\n    @classmethod\n    def class_name(cls) -> str:\n        return \"TiDBDocumentStore\"\n\n    def _init_store(\n        self, namespace: Optional[str] = None, vector_dims: Optional[int] = None\n    ):\n        self._document_db_model, self._chunk_db_model = dynamic_create_models(\n            namespace=namespace,\n            vector_dims=vector_dims,\n            embedding_model=self._embedding_model,\n        )\n        self._document_table = self._client.create_table(schema=self._document_db_model)\n        self._chunk_table = self._client.create_table(schema=self._chunk_db_model)\n\n    # Document Operations.\n\n    def add(self, documents: List[Document]) -> List[Document]:\n        \"\"\"\n        Add documents.\n        \"\"\"\n        return_documents = []\n        for doc in documents:\n            db_document = self._document_db_model(**doc.model_dump(exclude={\"chunks\"}))\n            db_document = self._document_table.insert(db_document)\n\n            return_chunks = []\n            if doc.chunks is not None and len(doc.chunks) > 0:\n                db_chunks = self.add_doc_chunks(db_document.id, doc.chunks)\n                return_chunks = [\n                    Chunk(**db_chunk.model_dump(exclude={\"document\"}))\n                    for db_chunk in db_chunks\n                ]\n\n            return_documents.append(\n                Document(**db_document.model_dump(), chunks=return_chunks)\n            )\n\n        return return_documents\n\n    def update(self, document_id: UUID, update: Dict[str, Any]) -> None:\n        \"\"\"\n        Update documents.\n        \"\"\"\n        self._document_table.update(update, {\"id\": document_id})\n\n    def delete(self, document_id: UUID) -> None:\n        \"\"\"\n        Delete document by id.\n\n        
Note: The related chunks will also be deleted by cascade.\n\n        Args:\n            document_id: The id of the document to delete.\n        \"\"\"\n        return self._document_table.delete({\"id\": document_id})\n\n    def get(self, document_id: UUID) -> Document:\n        \"\"\"\n        Get document by id.\n        \"\"\"\n        db_document = self._document_table.get(document_id)\n        return Document(**db_document.model_dump())\n\n    # TODO: Support pagination.\n    def list(self, filters: Dict[str, Any] = None) -> List[Document]:\n        \"\"\"\n        List all documents.\n        \"\"\"\n        db_documents = self._document_table.query(filters)\n        return [Document(**d.model_dump()) for d in db_documents]\n\n    def search(\n        self,\n        query: str | List[float],\n        mode: SearchMode = \"vector\",\n        top_k: Optional[int] = None,\n        similarity_threshold: Optional[float] = None,\n        num_candidate: Optional[int] = None,\n        full_document: Optional[bool] = None,\n    ) -> DocumentSearchResult:\n        # TODO: Support Fulltext search.\n        # TODO: Support Hybrid search.\n        if mode != \"vector\":\n            raise NotImplementedError(\n                \".search() only supports vector search currently, fulltext and hybrid search will be coming soon.\"\n            )\n\n        db_chunks = (\n            self._chunk_table.search(query, query_type=SearchType.VECTOR_SEARCH)\n            .distance_threshold(\n                (1 - similarity_threshold) if similarity_threshold is not None else None\n            )\n            .num_candidate(num_candidate)\n            .limit(top_k)\n            .to_pydantic(with_score=True)\n        )\n        document_ids = [c.document_id for c in db_chunks]\n        db_documents = self.list(\n            {\n                \"id\": {\"$in\": document_ids},\n            }\n        )\n        return self._convert_to_retrieval_result(db_chunks, db_documents, 
full_document)\n\n    def _convert_to_retrieval_result(\n        self,\n        db_chunks: List[TableModel],\n        db_documents: List[TableModel],\n        full_document: bool,\n    ) -> DocumentSearchResult:\n        return DocumentSearchResult(\n            chunks=[\n                RetrievedChunk(\n                    **c.hit.model_dump(),\n                    similarity_score=c.similarity_score,\n                    score=c.score,\n                )\n                for c in db_chunks\n            ],\n            documents=[\n                Document(**d.model_dump())\n                if full_document\n                else DocumentDescriptor(**d.model_dump())\n                for d in db_documents\n            ],\n        )\n\n    # Chunk Operations.\n\n    def add_doc_chunks(self, document_id: UUID, chunks: List[Chunk]) -> List[Chunk]:\n        \"\"\"\n        Add document chunks.\n        \"\"\"\n        db_chunks = [\n            self._chunk_db_model(\n                **c.model_dump(exclude={\"document_id\"}), document_id=document_id\n            )\n            for c in chunks\n        ]\n        db_chunks = self._chunk_table.bulk_insert(db_chunks)\n        return [Chunk(**c.model_dump(exclude={\"document\"})) for c in db_chunks]\n\n    def list_doc_chunks(self, document_id: UUID) -> List[Chunk]:\n        \"\"\"\n        List document chunks.\n        \"\"\"\n        return self._chunk_table.query({\"document_id\": document_id})\n\n    def get_chunk(self, chunk_id: UUID) -> Chunk:\n        \"\"\"\n        Get chunk by id.\n        \"\"\"\n        chunk = self._chunk_table.get(chunk_id)\n        return Chunk(**chunk.model_dump(exclude={\"document\"}))\n\n    def delete_chunk(self, chunk_id: UUID) -> None:\n        \"\"\"\n        Delete document chunk.\n        \"\"\"\n        return self._chunk_table.delete({\"id\": chunk_id})\n\n    def update_chunk(self, chunk_id: UUID, update: Dict[str, Any]) -> Chunk:\n        \"\"\"\n        Update chunk.\n        
\"\"\"\n        self._chunk_table.update(update, {\"id\": chunk_id})\n        return self.get_chunk(chunk_id)\n\n    # Document Store Operations.\n\n    def recreate(self) -> None:\n        self._client.drop_table(self._chunk_table.table_name)\n        self._client.drop_table(self._document_table.table_name)\n        self._document_table = self._client.create_table(schema=self._document_db_model)\n        self._chunk_table = self._client.create_table(schema=self._chunk_db_model)\n\n    def reset(self) -> None:\n        with self._client.session():\n            self._client.execute(\"SET FOREIGN_KEY_CHECKS = 0\")\n            self._chunk_table.truncate()\n            self._document_table.truncate()\n            self._client.execute(\"SET FOREIGN_KEY_CHECKS = 1\")\n"
  },
  {
    "path": "core/autoflow/storage/doc_store/types.py",
    "content": "from uuid import UUID\nfrom datetime import datetime\nfrom typing import Optional, List, Any\n\nfrom pydantic import BaseModel, Field, computed_field\n\nfrom autoflow.data_types import DataType\nfrom autoflow.utils import uuid6\nfrom autoflow.utils.hash import sha256\n\n\n# Chunk\n\n\nclass Chunk(BaseModel):\n    id: Optional[UUID] = Field(default_factory=uuid6.uuid7)\n    text: str = Field(description=\"The text of the chunk.\")\n    text_vec: Optional[Any] = Field(\n        default=None, description=\"The vector of text vectors.\"\n    )\n    meta: Optional[dict] = Field(\n        default_factory=dict, description=\"The metadata of the chunk.\"\n    )\n    document_id: Optional[UUID] = Field(\n        default=None, description=\"The id of the document that the chunk belongs to.\"\n    )\n    created_at: datetime = Field(default=None, description=\"The created time\")\n    updated_at: datetime = Field(default=None, description=\"The updated time\")\n\n    @computed_field\n    @property\n    def hash(self) -> Optional[str]:\n        return sha256(self.text)\n\n\nclass RetrievedChunk(Chunk):\n    score: Optional[float] = Field(description=\"The score of the chunk.\", default=None)\n    similarity_score: Optional[float] = Field(\n        default=None, description=\"The similarity score of the chunk.\"\n    )\n\n\n# Document\n\n\nclass Document(BaseModel):\n    id: Optional[UUID] = Field(default_factory=uuid6.uuid7)\n    name: Optional[str] = Field(None, description=\"The name of the document.\")\n    content: str = Field(description=\"The content of the document.\")\n    data_type: Optional[DataType] = Field(\n        default=None, description=\"The data type of the document.\"\n    )\n    meta: Optional[dict] = Field(\n        default_factory=dict, description=\"The metadata of the document.\"\n    )\n    created_at: Optional[datetime] = Field(default=None, description=\"The created time\")\n    updated_at: Optional[datetime] = Field(default=None, 
description=\"The updated time\")\n    chunks: Optional[List[Chunk]] = Field(\n        default_factory=list, description=\"The chunks of the document.\"\n    )\n\n    @computed_field\n    @property\n    def hash(self) -> Optional[str]:\n        return sha256(self.content)\n\n\nclass DocumentDescriptor(BaseModel):\n    id: UUID\n    name: str\n    # source_uri: str\n\n\nclass DocumentSearchResult(BaseModel):\n    chunks: List[RetrievedChunk] = Field(\n        default_factory=list, description=\"The chunks of the search result.\"\n    )\n    documents: List[DocumentDescriptor | Document] = Field(\n        default_factory=list,\n        description=\"The aggregated documents of the search result.\",\n    )\n"
  },
  {
    "path": "core/autoflow/storage/graph_store/__init__.py",
    "content": "from .base import GraphStore\nfrom .tidb_graph_store import TiDBGraphStore\n\n__all__ = [\"GraphStore\", \"TiDBGraphStore\"]\n"
  },
  {
    "path": "core/autoflow/storage/graph_store/base.py",
    "content": "from abc import ABC\n\nimport logging\nfrom typing import (\n    Collection,\n    Dict,\n    List,\n    Optional,\n    Tuple,\n)\nfrom uuid import UUID\n\nfrom autoflow.storage.types import QueryBundle\nfrom autoflow.types import BaseComponent\nfrom autoflow.storage.graph_store.types import (\n    Entity,\n    EntityFilters,\n    EntityType,\n    EntityUpdate,\n    EntityDegree,\n    KnowledgeGraph,\n    KnowledgeGraphCreate,\n    Relationship,\n    RelationshipFilters,\n    RelationshipUpdate,\n)\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass GraphStore(BaseComponent, ABC):\n    \"\"\"Abstract base class for knowledge graph storage\"\"\"\n\n    # Entity Basic Operations\n\n    def list_entities(\n        self, filters: Optional[EntityFilters] = EntityFilters()\n    ) -> List[Entity]:\n        \"\"\"List all entities matching the filters\"\"\"\n        raise NotImplementedError\n\n    def search_entities(\n        self,\n        query: QueryBundle,\n        top_k: int = 10,\n        num_candidate: Optional[int] = None,\n        distance_threshold: Optional[float] = None,\n        filters: Optional[EntityFilters] = None,\n    ) -> List[Tuple[Entity, float]]:\n        raise NotImplementedError\n\n    def get_entity(self, entity_id: UUID) -> Entity:\n        \"\"\"Get entity by ID\"\"\"\n        raise NotImplementedError\n\n    def must_get_entity(self, entity_id: UUID) -> Entity:\n        entity = self.get_entity(entity_id)\n        if entity is None:\n            raise ValueError(f\"Can not find the entity #{entity_id}\")\n        return entity\n\n    def create_entity(\n        self,\n        name: str,\n        entity_type: EntityType = EntityType.original,\n        description: Optional[str] = None,\n        meta: Optional[dict] = None,\n        embedding: Optional[list[float]] = None,\n    ) -> Entity:\n        \"\"\"Create a new entity\"\"\"\n        raise NotImplementedError\n\n    def update_entity(self, entity: Entity | UUID, update: 
EntityUpdate) -> Entity:\n        \"\"\"Update an existing entity\"\"\"\n        raise NotImplementedError\n\n    def delete_entity(self, entity_id: UUID) -> None:\n        \"\"\"Delete an entity\"\"\"\n        raise NotImplementedError\n\n    def delete_orphan_entities(self):\n        \"\"\"Remove entities that have no relationships\"\"\"\n        raise NotImplementedError\n\n    # Entity Degree Operations\n\n    def calc_entity_out_degree(self, entity_id: UUID) -> int:\n        \"\"\"Calculate out-degree of an entity\"\"\"\n        raise NotImplementedError\n\n    def calc_entity_in_degree(self, entity_id: UUID) -> int:\n        \"\"\"Calculate in-degree of an entity\"\"\"\n        raise NotImplementedError\n\n    def calc_entity_degree(self, entity_id: UUID) -> int:\n        \"\"\"Calculate total degree of an entity\"\"\"\n        raise NotImplementedError\n\n    def calc_entities_degrees(\n        self, entity_ids: Collection[UUID]\n    ) -> Dict[UUID, EntityDegree]:\n        \"\"\"Calculate degrees for multiple entities in bulk\"\"\"\n        raise NotImplementedError\n\n    # Relationship Basic Operations\n\n    def get_relationship(self, relationship_id: UUID) -> Relationship:\n        \"\"\"Get relationship by ID\"\"\"\n        raise NotImplementedError\n\n    def list_relationships(self, filters: RelationshipFilters) -> List[Relationship]:\n        \"\"\"List all relationships matching the filters\"\"\"\n        raise NotImplementedError\n\n    def create_relationship(\n        self,\n        source_entity: Entity,\n        target_entity: Entity,\n        description: Optional[str] = None,\n        meta: Optional[dict] = {},\n        **kwargs,\n    ) -> Relationship:\n        \"\"\"Create a new relationship between entities\"\"\"\n        raise NotImplementedError\n\n    def update_relationship(\n        self, relationship: Relationship | UUID, update: RelationshipUpdate\n    ) -> Relationship:\n        \"\"\"Update an existing relationship\"\"\"\n        
raise NotImplementedError\n\n    def delete_relationship(self, relationship_id: UUID):\n        \"\"\"Delete a relationship\"\"\"\n        raise NotImplementedError\n\n    def search_relationships(\n        self,\n        query: QueryBundle,\n        top_k: int = 10,\n        num_candidate: Optional[int] = None,\n        distance_threshold: Optional[float] = None,\n        distance_range: Optional[Tuple[float, float]] = None,\n        filters: Optional[RelationshipFilters] = None,\n    ) -> List[Tuple[Relationship, float]]:\n        \"\"\"\n\n        Args:\n            query:\n            top_k:\n            num_candidate:\n            distance_threshold:\n            distance_range:\n            filters:\n        \"\"\"\n        raise NotImplementedError\n\n    def reset(self):\n        \"\"\"Reset the graph store\"\"\"\n        raise NotImplementedError\n\n    def drop(self):\n        \"\"\"Drop the graph store\"\"\"\n        raise NotImplementedError\n\n    # Knowledge Graph Operations\n\n    def add(self, knowledge_graph: KnowledgeGraphCreate) -> Optional[KnowledgeGraph]:\n        \"\"\"Add a knowledge graph to the graph store\"\"\"\n        raise NotImplementedError\n"
  },
  {
    "path": "core/autoflow/storage/graph_store/tidb_graph_store.py",
    "content": "import logging\nfrom typing import Collection, Dict, List, Optional, Tuple, Type, Any\nfrom uuid import UUID\n\nfrom pydantic import PrivateAttr\nfrom pytidb import Table, TiDBClient\nfrom pytidb.datatype import JSON, Text\nfrom pytidb.schema import (\n    Column,\n    Field,\n    Relationship as SQLRelationship,\n    TableModel,\n    VectorField,\n)\nfrom pytidb.sql import func, select, or_\nfrom pytidb.embeddings import EmbeddingFunction\nfrom sqlalchemy import Index\n\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.orms.base import UUIDBaseModel\nfrom autoflow.storage.graph_store.base import GraphStore\nfrom autoflow.storage.graph_store.types import (\n    Entity,\n    EntityDegree,\n    EntityFilters,\n    EntityType,\n    EntityUpdate,\n    KnowledgeGraph,\n    KnowledgeGraphCreate,\n    Relationship,\n    RelationshipFilters,\n    RelationshipUpdate,\n)\nfrom autoflow.storage.types import QueryBundle\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef dynamic_create_models(\n    namespace: Optional[str] = None,\n    embedding_model: Optional[EmbeddingModel] = None,\n    vector_dims: Optional[int] = None,\n) -> tuple[Type[TableModel], Type[TableModel]]:\n    if embedding_model is None and vector_dims is None:\n        raise ValueError(\"Either `embedding_model` or `vector_dims` must be specified\")\n\n    # Determine the table names.\n    suffix = f\"_{namespace}\" if namespace else \"\"\n    entity_table_name = f\"entities{suffix}\"\n    relationship_table_name = f\"relationships{suffix}\"\n    entity_model_name = f\"DBEntity{suffix}\"\n    relationship_model_name = f\"DBRelationship{suffix}\"\n\n    # Embedding function.\n    embed_fn = None\n    if embedding_model is not None:\n        embed_fn = EmbeddingFunction(\n            model_name=embedding_model.model_name,\n            dimensions=embedding_model.dimensions,\n            api_key=embedding_model.api_key,\n            api_base=embedding_model.api_base,\n     
       timeout=embedding_model.timeout,\n        )\n\n    # Initialize the entity model.\n    if embed_fn is not None:\n        entity_vector_field = embed_fn.VectorField()\n    else:\n        entity_vector_field = VectorField(vector_dims)\n\n    class DBEntity(UUIDBaseModel):\n        __table_args__ = (\n            Index(\"idx_entity_type\", \"entity_type\"),\n            Index(\"idx_entity_name\", \"name\"),\n        )\n        entity_type: EntityType = EntityType.original\n        name: str = Field(max_length=512)\n        description: str = Field(sa_column=Column(Text))\n        meta: Optional[Dict] = Field(default_factory=dict, sa_column=Column(JSON))\n        embedding: Optional[Any] = entity_vector_field\n\n        def __hash__(self):\n            return hash(self.id)\n\n        def __eq__(self, other):\n            return self.id == other.id\n\n    entity_model = type(\n        entity_model_name,\n        (DBEntity,),\n        {\n            \"__tablename__\": entity_table_name,\n            \"__table_args__\": {\n                \"extend_existing\": True,\n            },\n        },\n        table=True,\n    )\n\n    # Initialize the relationship model.\n    if embed_fn is not None:\n        relationship_vector_field = embed_fn.VectorField()\n    else:\n        relationship_vector_field = VectorField(vector_dims)\n\n    class DBRelationship(UUIDBaseModel):\n        description: str = Field(sa_column=Column(Text))\n        source_entity_id: UUID = Field(foreign_key=f\"{entity_table_name}.id\")\n        target_entity_id: UUID = Field(foreign_key=f\"{entity_table_name}.id\")\n        meta: Optional[Dict] = Field(default_factory=dict, sa_column=Column(JSON))\n        embedding: Optional[Any] = relationship_vector_field\n        weight: Optional[float] = Field(default=0)\n        chunk_id: Optional[UUID] = Field(default=None)\n        document_id: Optional[UUID] = Field(default=None)\n\n        def __hash__(self):\n            return hash(self.id)\n\n        
def __eq__(self, other):\n            return self.id == other.id\n\n    relationship_model = type(\n        relationship_model_name,\n        (DBRelationship,),\n        {\n            \"__tablename__\": relationship_table_name,\n            \"__table_args__\": {\"extend_existing\": True},\n            \"__annotations__\": {\n                \"source_entity\": entity_model,\n                \"target_entity\": entity_model,\n            },\n            \"source_entity\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"primaryjoin\": f\"{relationship_model_name}.source_entity_id == {entity_model_name}.id\",\n                    \"lazy\": \"joined\",\n                },\n            ),\n            \"target_entity\": SQLRelationship(\n                sa_relationship_kwargs={\n                    \"primaryjoin\": f\"{relationship_model_name}.target_entity_id == {entity_model_name}.id\",\n                    \"lazy\": \"joined\",\n                },\n            ),\n        },\n        table=True,\n    )\n\n    return entity_model, relationship_model\n\n\nclass TiDBGraphStore(GraphStore):\n    _db: TiDBClient = PrivateAttr()\n    _entity_db_model: Type[TableModel] = PrivateAttr()\n    _entity_table: Table = PrivateAttr()\n    _relationship_db_model: Type[TableModel] = PrivateAttr()\n    _relationship_table: Table = PrivateAttr()\n\n    def __init__(\n        self,\n        client: TiDBClient,\n        namespace: Optional[str] = None,\n        embedding_model: Optional[EmbeddingModel] = None,\n        vector_dims: Optional[int] = None,\n        entity_distance_threshold: Optional[float] = 0.1,\n    ):\n        super().__init__()\n        self._db = client\n        self._db_engine = client.db_engine\n        self._embedding_model = embedding_model\n        self._entity_distance_threshold = entity_distance_threshold\n        self._init_store(namespace, vector_dims)\n\n    def _init_store(\n        self, namespace: Optional[str] = None, 
vector_dims: Optional[int] = None\n    ):\n        self._entity_db_model, self._relationship_db_model = dynamic_create_models(\n            namespace=namespace,\n            vector_dims=vector_dims,\n            embedding_model=self._embedding_model,\n        )\n        self._entity_table = self._db.create_table(schema=self._entity_db_model)\n        self._relationship_table = self._db.create_table(\n            schema=self._relationship_db_model\n        )\n\n    # Entity Basic Operations\n\n    def get_entity(self, entity_id: UUID) -> Entity:\n        return self._entity_table.get(entity_id)\n\n    def list_entities(\n        self, filters: Optional[EntityFilters] = EntityFilters(), **kwargs\n    ) -> List[Entity]:\n        if isinstance(kwargs, dict):\n            filters = filters.model_copy(update=kwargs)\n        filter_dict = self._convert_entity_filters(filters)\n        return self._entity_table.query(filter_dict)\n\n    def search_entities(\n        self,\n        query: QueryBundle,\n        top_k: int = 10,\n        num_candidate: Optional[int] = None,\n        distance_threshold: Optional[float] = None,\n        filters: Optional[EntityFilters] = None,\n    ) -> List[Tuple[Entity, float]]:\n        filter_dict = self._convert_entity_filters(filters)\n        results = (\n            self._entity_table.search(query.query_embedding or query.query_str)\n            .num_candidate(num_candidate or top_k * 10)\n            .filter(filter_dict)\n            .distance_threshold(distance_threshold)\n            .limit(top_k)\n            .to_pydantic()\n        )\n        return [(item.hit, item.score) for item in results]\n\n    def _convert_entity_filters(self, filters: Optional[EntityFilters]) -> dict:\n        filter_dict = {}\n        if filters is None:\n            return filter_dict\n        if filters.entity_type:\n            filter_dict[\"entity_type\"] = filters.entity_type.value\n        if filters.entity_id:\n            op = \"$in\" if 
isinstance(filters.entity_id, list) else \"$eq\"\n            filter_dict[\"id\"] = {op: filters.entity_id}\n        return filter_dict\n\n    def create_entity(\n        self,\n        name: str,\n        entity_type: EntityType = EntityType.original,\n        description: Optional[str] = None,\n        meta: Optional[dict] = None,\n        embedding: Optional[list[float]] = None,\n    ) -> Entity:\n        if embedding is None:\n            embedding = self._get_entity_embedding(name, description)\n        entity = self._entity_db_model(\n            name=name,\n            entity_type=entity_type,\n            description=description,\n            meta=meta,\n            embedding=embedding,\n        )\n        return self._entity_table.insert(entity)\n\n    def _get_entity_embedding(self, name: str, description: str) -> list[float]:\n        embedding_str = f\"{name}: {description}\"\n        return self._embedding_model.get_text_embedding(embedding_str)\n\n    def find_or_create_entity(\n        self,\n        name: str,\n        entity_type: EntityType = EntityType.original,\n        description: Optional[str] = None,\n        meta: Optional[dict] = None,\n        embedding: Optional[Any] = None,\n    ) -> Entity:\n        query_embedding = self._get_entity_embedding(name, description)\n        query = QueryBundle(query_embedding=query_embedding)\n        nearest_entity = self.search_entities(\n            query, top_k=1, distance_threshold=self._entity_distance_threshold\n        )\n        if len(nearest_entity) != 0:\n            return nearest_entity[0][0]\n        else:\n            return self.create_entity(\n                name=name,\n                entity_type=entity_type,\n                description=description,\n                meta=meta,\n                embedding=embedding,\n            )\n\n    def update_entity(self, entity: Entity | UUID, update: EntityUpdate) -> Entity:\n        if isinstance(entity, UUID):\n            entity = 
self.get_entity(entity)\n\n        update_dict = update.model_dump(exclude_none=True)\n        if update.embedding is None:\n            update_dict[\"embedding\"] = self._get_entity_embedding(\n                entity.name, entity.description\n            )\n\n        self._entity_table.update(values=update_dict, filters={\"id\": entity.id})\n        # FIXME: pytidb should return the updated entity.\n        entity = self._entity_table.get(entity.id)\n\n        # Update connected relationships.\n        connected_relationships = self.list_relationships(\n            filters=RelationshipFilters(\n                entity_id=entity.id,\n            )\n        )\n        for relationship in connected_relationships:\n            self.update_relationship(relationship, RelationshipUpdate())\n\n        return entity\n\n    def delete_entity(self, entity_id: UUID) -> None:\n        with self._db.session():\n            # Delete all relationships connected to the entity.\n            self._relationship_table.delete({\"source_entity_id\": entity_id})\n            self._relationship_table.delete({\"target_entity_id\": entity_id})\n\n            # Delete the entity.\n            self._entity_table.delete({\"id\": entity_id})\n\n    def delete_orphan_entities(self):\n        raise NotImplementedError()\n\n    # Entity Degree Operations\n\n    def calc_entity_out_degree(self, entity_id: UUID) -> int:\n        stmt = select(func.count(self._relationship_db_model.id)).where(\n            self._relationship_db_model.source_entity_id == entity_id\n        )\n        return self._db.query(stmt).scalar()\n\n    def calc_entity_in_degree(self, entity_id: UUID) -> int:\n        stmt = select(func.count(self._relationship_db_model.id)).where(\n            self._relationship_db_model.target_entity_id == entity_id\n        )\n        return self._db.query(stmt).scalar()\n\n    def calc_entity_degree(self, entity_id: UUID) -> int:\n        stmt = 
select(func.count(self._relationship_db_model.id)).where(\n            or_(\n                self._relationship_db_model.target_entity_id == entity_id,\n                self._relationship_db_model.source_entity_id == entity_id,\n            )\n        )\n        return self._db.query(stmt).scalar()\n\n    def calc_entities_degrees(\n        self, entity_ids: Collection[UUID]\n    ) -> Dict[UUID, EntityDegree]:\n        entity_table_name = self._entity_table.table_name\n        relationship_table_name = self._relationship_table.table_name\n        stmt = f\"\"\"\n            SELECT\n                e.id as id,\n                COALESCE(SUM(CASE WHEN r.target_entity_id = e.id THEN 1 ELSE 0 END), 0) AS in_degree,\n                COALESCE(SUM(CASE WHEN r.source_entity_id = e.id THEN 1 ELSE 0 END), 0) AS out_degree,\n                COALESCE(COUNT(e.id), 0) AS degree\n            FROM {entity_table_name} e\n            LEFT JOIN {relationship_table_name} r ON e.id = r.source_entity_id OR e.id = r.target_entity_id\n            WHERE e.id IN :entity_ids\n            GROUP BY e.id\n        \"\"\"\n        results = self._db.query(\n            stmt, {\"entity_ids\": [entity_id.hex for entity_id in entity_ids]}\n        ).to_list()\n        return {\n            UUID(item[\"id\"]): EntityDegree(\n                in_degree=item[\"in_degree\"],\n                out_degree=item[\"out_degree\"],\n                degrees=item[\"degree\"],\n            )\n            for item in results\n        }\n\n    # Relationship Basic Operations\n\n    def get_relationship(self, relationship_id: UUID) -> Relationship:\n        return self._relationship_table.get(relationship_id)\n\n    def list_relationships(\n        self, filters: RelationshipFilters = RelationshipFilters(), **kwargs\n    ) -> List[Relationship]:\n        if isinstance(kwargs, dict):\n            filters = filters.model_copy(update=kwargs)\n        filter_dict = self._convert_relationship_filters(filters)\n        
return self._relationship_table.query(filter_dict)\n\n    def search_relationships(\n        self,\n        query: QueryBundle,\n        top_k: int = 10,\n        num_candidate: Optional[int] = None,\n        distance_threshold: Optional[float] = None,\n        distance_range: Optional[Tuple[float, float]] = None,\n        filters: Optional[RelationshipFilters] = None,\n    ) -> List[Tuple[Relationship, float]]:\n        filter_dict = self._convert_relationship_filters(filters)\n        results = (\n            self._relationship_table.search(query.query_embedding or query.query_str)\n            .num_candidate(num_candidate or top_k * 10)\n            .filter(filter_dict)\n            .distance_threshold(distance_threshold)\n            .distance_range(distance_range[0], distance_range[1])\n            .limit(top_k)\n            .to_pydantic()\n        )\n\n        # FIXME: pytidb should return the relationship field: target_entity, source_entity.\n        entity_ids = [item.hit.target_entity_id for item in results]\n        entity_ids.extend([item.hit.source_entity_id for item in results])\n        entities = self.list_entities(filters=EntityFilters(entity_id=entity_ids))\n        entity_map = {entity.id: entity for entity in entities}\n        for item in results:\n            item.hit.target_entity = entity_map[item.hit.target_entity_id]\n            item.hit.source_entity = entity_map[item.hit.source_entity_id]\n\n        return [(item.hit, item.score) for item in results]\n\n    def _convert_relationship_filters(self, filters: RelationshipFilters) -> dict:\n        filter_dict = {}\n\n        if filters.entity_id:\n            if isinstance(filters.entity_id, list):\n                if len(filters.entity_id) != 0:\n                    filter_dict[\"$or\"] = [\n                        {\"target_entity_id\": {\"$in\": filters.entity_id}},\n                        {\"source_entity_id\": {\"$in\": filters.entity_id}},\n                    ]\n            else:\n   
             filter_dict[\"$or\"] = [\n                    {\"target_entity_id\": {\"$eq\": filters.entity_id}},\n                    {\"source_entity_id\": {\"$eq\": filters.entity_id}},\n                ]\n\n        if filters.source_entity_id:\n            if isinstance(filters.source_entity_id, list):\n                if len(filters.source_entity_id) != 0:\n                    filter_dict[\"$or\"] = [\n                        {\"source_entity_id\": {\"$in\": filters.source_entity_id}}\n                    ]\n            else:\n                filter_dict[\"$or\"] = [\n                    {\"source_entity_id\": {\"$eq\": filters.source_entity_id}}\n                ]\n\n        if filters.target_entity_id:\n            if isinstance(filters.target_entity_id, list):\n                if len(filters.target_entity_id) != 0:\n                    filter_dict[\"$or\"] = [\n                        {\"target_entity_id\": {\"$in\": filters.target_entity_id}}\n                    ]\n            else:\n                filter_dict[\"$or\"] = [\n                    {\"target_entity_id\": {\"$eq\": filters.target_entity_id}}\n                ]\n\n        if filters.relationship_id:\n            if isinstance(filters.relationship_id, list):\n                if len(filters.relationship_id) != 0:\n                    filter_dict[\"id\"] = {\"$in\": filters.relationship_id}\n            else:\n                filter_dict[\"id\"] = {\"$eq\": filters.relationship_id}\n\n        if (\n            filters.exclude_relationship_ids\n            and len(filters.exclude_relationship_ids) != 0\n        ):\n            filter_dict[\"id\"] = {\"$nin\": filters.exclude_relationship_ids}\n\n        if filters.document_id:\n            if isinstance(filters.document_id, list):\n                if len(filters.document_id) != 0:\n                    filter_dict[\"document_id\"] = {\"$in\": filters.document_id}\n            else:\n                filter_dict[\"document_id\"] = {\"$eq\": 
filters.document_id}\n\n        if filters.chunk_id:\n            if isinstance(filters.chunk_id, list):\n                if len(filters.chunk_id) != 0:\n                    filter_dict[\"chunk_id\"] = {\"$in\": filters.chunk_id}\n            else:\n                filter_dict[\"chunk_id\"] = {\"$eq\": filters.chunk_id}\n\n        if filters.metadata:\n            for key, value in filters.metadata.items():\n                op = \"$in\" if isinstance(value, list) else \"$eq\"\n                filter_dict[f\"meta.{key}\"] = {op: value}\n\n        return filter_dict\n\n    def create_relationship(\n        self,\n        source_entity: Entity | UUID,\n        target_entity: Entity | UUID,\n        description: Optional[str] = None,\n        meta: Optional[dict] = {},\n        embedding: Optional[Any] = None,\n    ) -> Relationship:\n        \"\"\"\n        Create a relationship between two entities.\n        \"\"\"\n        if isinstance(source_entity, UUID):\n            source_entity = self.get_entity(source_entity)\n        if isinstance(target_entity, UUID):\n            target_entity = self.get_entity(target_entity)\n\n        if embedding is None:\n            embedding = self._get_relationship_embedding(\n                source_entity.name,\n                source_entity.description,\n                target_entity.name,\n                target_entity.description,\n                description,\n            )\n\n        relationship = self._relationship_db_model(\n            source_entity_id=source_entity.id,\n            target_entity_id=target_entity.id,\n            description=description,\n            meta=meta,\n            embedding=embedding,\n        )\n        return self._relationship_table.insert(relationship)\n\n    def _get_relationship_embedding(\n        self,\n        source_entity_name: str,\n        source_entity_description,\n        target_entity_name: str,\n        target_entity_description: str,\n        relationship_desc: str,\n    ) -> 
List[float]:\n        embedding_str = (\n            f\"{source_entity_name}({source_entity_description}) -> \"\n            f\"{relationship_desc} -> {target_entity_name}({target_entity_description}) \"\n        )\n        return self._embedding_model.get_text_embedding(embedding_str)\n\n    def update_relationship(\n        self, relationship: Relationship | UUID, update: RelationshipUpdate\n    ) -> Relationship:\n        if isinstance(relationship, UUID):\n            relationship = self.get_relationship(relationship)\n\n        update_dict = update.model_dump()\n        if update.embedding is None:\n            update_dict[\"embedding\"] = self._get_relationship_embedding(\n                relationship.source_entity.name,\n                relationship.source_entity.description,\n                relationship.target_entity.name,\n                relationship.target_entity.description,\n                relationship.description,\n            )\n\n        self._relationship_table.update(\n            values=update_dict, filters={\"id\": relationship.id}\n        )\n        # FIXME: pytidb should return the updated relationship.\n        relationship = self._relationship_table.get(relationship.id)\n        return relationship\n\n    def delete_relationship(self, relationship_id: UUID):\n        return self._relationship_table.delete(filters={\"id\": relationship_id})\n\n    # Knowledge Graph Operations\n\n    def add(self, knowledge_graph: KnowledgeGraphCreate) -> Optional[KnowledgeGraph]:\n        with self._db.session():\n            # Create or find entities\n            entity_map = {}\n            for entity in knowledge_graph.entities:\n                created_entity = self.find_or_create_entity(\n                    entity_type=EntityType.original,\n                    name=entity.name,\n                    description=entity.description,\n                    meta=entity.meta,\n                )\n                entity_map[entity.name] = created_entity\n      
      entities = list(entity_map.values())\n\n            # Create relationships\n            relationships = []\n            for rel in knowledge_graph.relationships:\n                logger.info(\"Saving relationship: %s\", rel.description)\n                source_entity = entity_map.get(rel.source_entity_name)\n                if not source_entity:\n                    logger.warning(\n                        \"Source entity not found for relationship: %s\", str(rel)\n                    )\n                    continue\n\n                target_entity = entity_map.get(rel.target_entity_name)\n                if not target_entity:\n                    logger.warning(\n                        \"Target entity not found for relationship: %s\", str(rel)\n                    )\n                    continue\n\n                relationship = self.create_relationship(\n                    source_entity=source_entity,\n                    target_entity=target_entity,\n                    description=rel.description,\n                    meta=rel.meta,\n                )\n                relationships.append(relationship)\n\n        return KnowledgeGraph(\n            entities=[Entity(**entity.model_dump()) for entity in entities],\n            relationships=[\n                Relationship(**relationship.model_dump())\n                for relationship in relationships\n            ],\n        )\n\n    # Graph Store Operations\n\n    def reset(self):\n        with self._db.session():\n            self._db.execute(\"SET FOREIGN_KEY_CHECKS = 0\")\n            self._relationship_table.truncate()\n            self._entity_table.truncate()\n            self._db.execute(\"SET FOREIGN_KEY_CHECKS = 1\")\n\n    def recreate(self):\n        self._db.drop_table(self._relationship_table.table_name)\n        self._db.drop_table(self._entity_table.table_name)\n        self._entity_table = self._db.create_table(schema=self._entity_db_model)\n        self._relationship_table = 
self._db.create_table(\n            schema=self._relationship_db_model\n        )\n\n    def drop(self):\n        self._db.drop_table(self._relationship_table.table_name)\n        self._db.drop_table(self._entity_table.table_name)\n"
  },
  {
    "path": "core/autoflow/storage/graph_store/types.py",
    "content": "from enum import Enum\nfrom typing import List, Optional, Dict, Any\nfrom uuid import UUID\nfrom datetime import datetime\n\nfrom pydantic import BaseModel, model_validator, Field\n\n\n# Entity\n\n\nclass EntityType(str, Enum):\n    original = \"original\"\n    synopsis = \"synopsis\"\n\n    def __str__(self):\n        return self.value\n\n\nclass Entity(BaseModel):\n    id: Optional[UUID]\n    entity_type: Optional[EntityType] = Field(\n        description=\"Type of the entity\", default=EntityType.original\n    )\n    name: str = Field(description=\"Name of the entity\")\n    description: str = Field(description=\"Description of the entity\")\n    embedding: Optional[Any] = Field(\n        description=\"Embedding of the entity\", default=None\n    )\n    meta: Optional[Dict[str, Any]] = Field(\n        description=\"Metadata of the entity\", default_factory=dict\n    )\n    created_at: Optional[datetime]\n    updated_at: Optional[datetime]\n\n\nclass EntityCreate(BaseModel):\n    entity_type: EntityType = EntityType.original\n    name: Optional[str] = None\n    description: Optional[str] = None\n    meta: Optional[dict] = None\n\n\nclass SynopsisEntityCreate(EntityCreate):\n    topic: str\n    entities: List[int] = Field(description=\"The id list of the related entities\")\n\n    @model_validator(mode=\"after\")\n    def validate_entities(self):\n        if len(self.entities) == 0:\n            raise ValueError(\"Entities list should not be empty\")\n        return self\n\n\nclass EntityFilters(BaseModel):\n    entity_type: Optional[EntityType] = None\n    entity_id: Optional[UUID | List[UUID]] = None\n\n\nclass EntityUpdate(BaseModel):\n    name: Optional[str] = None\n    description: Optional[str] = None\n    meta: Optional[dict] = None\n    embedding: Optional[list[float]] = None\n\n\nclass EntityDegree(BaseModel):\n    out_degree: int = 0\n    in_degree: int = 0\n    degrees: int = 0\n\n\n# Relationship\n\n\nclass Relationship(BaseModel):\n    
id: Optional[UUID]\n    source_entity_id: Optional[UUID] = Field(default=None)\n    source_entity: Optional[Entity] = Field(default=None)\n    target_entity_id: Optional[UUID] = Field(default=None)\n    target_entity: Optional[Entity] = Field(default=None)\n    description: str = Field(description=\"Description of the relationship\")\n    weight: Optional[float] = Field(default=0, description=\"Weight of the relationship\")\n    meta: Optional[Dict[str, Any]] = Field(\n        description=\"Metadata of the relationship\", default_factory=dict\n    )\n    embedding: Optional[Any] = Field(\n        description=\"Embedding of the relationship\", default=None\n    )\n    created_at: Optional[datetime] = Field(default=None)\n    updated_at: Optional[datetime] = Field(default=None)\n\n\nclass RelationshipCreate(BaseModel):\n    source_entity_name: str\n    target_entity_name: str\n    description: str\n    meta: Dict[str, Any] = Field(default_factory=dict)\n    weight: Optional[float] = Field(default=0)\n    chunk_id: Optional[UUID] = Field(default=None)\n    document_id: Optional[UUID] = Field(default=None)\n\n\nclass RelationshipUpdate(BaseModel):\n    description: Optional[str] = None\n    embedding: Optional[list[float]] = None\n\n\nclass RelationshipFilters(BaseModel):\n    entity_id: Optional[UUID | List[UUID]] = Field(\n        description=\"Filter by the entity connected to the relationship\",\n        default=None,\n    )\n    target_entity_id: Optional[UUID | List[UUID]] = Field(\n        description=\"Filter by the target entity of the relationship\",\n        default=None,\n    )\n    source_entity_id: Optional[UUID | List[UUID]] = Field(\n        description=\"Filter by the source entity of the relationship\",\n        default=None,\n    )\n    chunk_id: Optional[UUID | List[UUID]] = Field(\n        description=\"Filter by the chunk which the relationship belongs to\",\n        default=None,\n    )\n    document_id: Optional[UUID | List[UUID]] = Field(\n     
   description=\"Filter by the document which the relationship belongs to\",\n        default=None,\n    )\n    relationship_id: Optional[UUID | List[UUID]] = Field(\n        description=\"Filter by the id of the relationship\",\n        default=None,\n    )\n    exclude_relationship_ids: Optional[List[UUID]] = Field(\n        description=\"Exclude the relationships by the id\",\n        default=None,\n    )\n    metadata: Optional[Dict[str, Any]] = Field(\n        description=\"Filter by the metadata of the relationship\",\n        default=None,\n    )\n\n\n# Knowledge Graph\n\n\nclass KnowledgeGraph(BaseModel):\n    entities: List[Entity] = Field(default_factory=list)\n    relationships: List[Relationship] = Field(default_factory=list)\n\n\n# Knowledge Graph Create\n\n\nclass KnowledgeGraphCreate(BaseModel):\n    entities: List[EntityCreate]\n    relationships: List[RelationshipCreate]\n"
  },
  {
    "path": "core/autoflow/storage/types.py",
    "content": "from typing import Optional, List\n\nfrom pydantic import BaseModel\n\n\nclass QueryBundle(BaseModel):\n    query_str: Optional[str] = None\n    query_embedding: Optional[List[float]] = None\n"
  },
  {
    "path": "core/autoflow/types.py",
    "content": "from typing import Literal\n\nfrom llama_index.core.schema import BaseComponent\n\nBaseComponent = BaseComponent\n\nSearchMode = Literal[\"vector\", \"fulltext\", \"hybird\"]\n"
  },
  {
    "path": "core/autoflow/utils/hash.py",
    "content": "import hashlib\nfrom typing import Optional\n\n\ndef sha256(text: Optional[str]) -> Optional[str]:\n    return hashlib.sha256(text.encode(\"utf-8\")).hexdigest() if text else None\n"
  },
  {
    "path": "core/autoflow/utils/uuid6.py",
    "content": "r\"\"\"UUID draft version objects (universally unique identifiers).\nThis module provides the functions uuid6() and uuid7() for\ngenerating version 6 and 7 UUIDs as specified in\nhttps://github.com/uuid6/uuid6-ietf-draft.\n\nRepo: https://github.com/oittaa/uuid6-python\n\nCopy from: https://github.com/jonra1993/fastapi-alembic-sqlmodel-async/blob/main/backend/app/app/utils/uuid6.py\n\"\"\"\n\nimport secrets\nimport time\nfrom typing import Optional\nimport uuid\n\n\nclass UUID(uuid.UUID):\n    r\"\"\"UUID draft version objects\"\"\"\n\n    def __init__(\n        self,\n        hex: Optional[str] = None,\n        bytes: Optional[bytes] = None,\n        bytes_le: Optional[bytes] = None,\n        fields: Optional[tuple[int, int, int, int, int, int]] = None,\n        int: Optional[int] = None,\n        version: Optional[int] = None,\n        *,\n        is_safe=uuid.SafeUUID.unknown,\n    ) -> None:\n        r\"\"\"Create a UUID.\"\"\"\n\n        if int is None or [hex, bytes, bytes_le, fields].count(None) != 4:\n            super().__init__(\n                hex=hex,\n                bytes=bytes,\n                bytes_le=bytes_le,\n                fields=fields,\n                int=int,\n                version=version,\n                is_safe=is_safe,\n            )\n        if int is not None and not 0 <= int < 1 << 128:\n            raise ValueError(\"int is out of range (need a 128-bit value)\")\n        if version is not None:\n            if not 6 <= version <= 7:\n                raise ValueError(\"illegal version number\")\n            # Set the variant to RFC 4122.\n            int &= ~(0xC000 << 48)\n            int |= 0x8000 << 48\n            # Set the version number.\n            int &= ~(0xF000 << 64)\n            int |= version << 76\n        super().__init__(int=int, is_safe=is_safe)\n\n    @property\n    def subsec(self) -> int:\n        return ((self.int >> 64) & 0x0FFF) << 8 | ((self.int >> 54) & 0xFF)\n\n    @property\n    def 
time(self) -> int:\n        if self.version == 6:\n            return (\n                (self.time_low << 28)\n                | (self.time_mid << 12)\n                | (self.time_hi_version & 0x0FFF)\n            )\n        if self.version == 7:\n            return (self.int >> 80) * 10**6 + _subsec_decode(self.subsec)\n        return super().time\n\n\ndef _subsec_decode(value: int) -> int:\n    return -(-value * 10**6 // 2**20)\n\n\ndef _subsec_encode(value: int) -> int:\n    return value * 2**20 // 10**6\n\n\n_last_v6_timestamp = None\n_last_v7_timestamp = None\n\n\ndef uuid6(clock_seq: int = None) -> UUID:\n    r\"\"\"UUID version 6 is a field-compatible version of UUIDv1, reordered for\n    improved DB locality.  It is expected that UUIDv6 will primarily be\n    used in contexts where there are existing v1 UUIDs.  Systems that do\n    not involve legacy UUIDv1 SHOULD consider using UUIDv7 instead.\n    If 'clock_seq' is given, it is used as the sequence number;\n    otherwise a random 14-bit sequence number is chosen.\"\"\"\n\n    global _last_v6_timestamp\n\n    nanoseconds = time.time_ns()\n    # 0x01b21dd213814000 is the number of 100-ns intervals between the\n    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.\n    timestamp = nanoseconds // 100 + 0x01B21DD213814000\n    if _last_v6_timestamp is not None and timestamp <= _last_v6_timestamp:\n        timestamp = _last_v6_timestamp + 1\n    _last_v6_timestamp = timestamp\n    if clock_seq is None:\n        clock_seq = secrets.randbits(14)  # instead of stable storage\n    node = secrets.randbits(48)\n    time_high_and_time_mid = (timestamp >> 12) & 0xFFFFFFFFFFFF\n    time_low_and_version = timestamp & 0x0FFF\n    uuid_int = time_high_and_time_mid << 80\n    uuid_int |= time_low_and_version << 64\n    uuid_int |= (clock_seq & 0x3FFF) << 48\n    uuid_int |= node\n    return UUID(int=uuid_int, version=6)\n\n\ndef uuid7() -> UUID:\n    r\"\"\"UUID version 7 features a time-ordered 
value field derived from the\n    widely implemented and well known Unix Epoch timestamp source, the\n    number of milliseconds seconds since midnight 1 Jan 1970 UTC, leap\n    seconds excluded.  As well as improved entropy characteristics over\n    versions 1 or 6.\n    Implementations SHOULD utilize UUID version 7 over UUID version 1 and\n    6 if possible.\"\"\"\n\n    global _last_v7_timestamp\n\n    nanoseconds = time.time_ns()\n    if _last_v7_timestamp is not None and nanoseconds <= _last_v7_timestamp:\n        nanoseconds = _last_v7_timestamp + 1\n    _last_v7_timestamp = nanoseconds\n    timestamp_ms, timestamp_ns = divmod(nanoseconds, 10**6)\n    subsec = _subsec_encode(timestamp_ns)\n    subsec_a = subsec >> 8\n    subsec_b = subsec & 0xFF\n    rand = secrets.randbits(54)\n    uuid_int = (timestamp_ms & 0xFFFFFFFFFFFF) << 80\n    uuid_int |= subsec_a << 64\n    uuid_int |= subsec_b << 54\n    uuid_int |= rand\n    return UUID(int=uuid_int, version=7)\n"
  },
  {
    "path": "core/autoflow/utils/vector.py",
    "content": "def cosine_distance(v1, v2):\n    import numpy as np\n\n    return 1 - np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))\n"
  },
  {
    "path": "core/examples/README.md",
    "content": "# Autoflow Examples\n\nThis folder contains examples of how to use Autoflow."
  },
  {
    "path": "core/examples/quickstart/fixtures/analyze-slow-queries.md",
    "content": "---\ntitle: Analyze Slow Queries\nsummary: Learn how to locate and analyze slow queries.\n---\n\n# Analyze Slow Queries\n\nTo address the issue of slow queries, you need to take the following two steps:\n\n1. Among many queries, identify which type of queries are slow.\n2. Analyze why this type of queries are slow.\n\nYou can easily perform step 1 using the [slow query log](/dashboard/dashboard-slow-query.md) and the [statement summary table](/statement-summary-tables.md) features. It is recommended to use [TiDB Dashboard](/dashboard/dashboard-intro.md), which integrates the two features and directly displays the slow queries in your browser. \n\nThis document focuses on how to perform step 2 - analyze why this type of queries are slow.\n\nGenerally, slow queries have the following major causes:\n\n- Optimizer issues, such as wrong index selected, wrong join type or sequence selected.\n- System issues. All issues not caused by the optimizer are system issues. For example, a busy TiKV instance processes requests slowly; outdated Region information causes slow queries.\n\nIn actual situations, optimizer issues might cause system issues. For example, for a certain type of queries, the optimizer uses a full table scan instead of the index. As a result, the SQL queries consume many resources, which causes the CPU usage of some TiKV instances to soar. This seems like a system issue, but in essence, it is an optimizer issue.\n\nTo identify system issues is relatively simple. To analyze optimizer issues, you need to determine whether the execution plan is reasonable or not. Therefore, it is recommended to analyze slow queries by following these procedures:\n\n1. Identify the performance bottleneck of the query, that is, the time-consuming part of the query process.\n2. Analyze the system issues: analyze the possible causes according to the query bottleneck and the monitoring/log information of that time.\n3. 
Analyze the optimizer issues: analyze whether there is a better execution plan.\n\nThe procedures above are explained in the following sections.\n\n## Identify the performance bottleneck of the query\n\nFirst, you need to have a general understanding of the query process. The key stages of the query execution process in TiDB are illustrated in [TiDB performance map](/media/performance-map.png).\n\nYou can get the duration information using the following methods:\n\n- [Slow log](/identify-slow-queries.md). It is recommended to view the slow log in [TiDB Dashboard](/dashboard/dashboard-overview.md).\n- [`EXPLAIN ANALYZE` statement](/sql-statements/sql-statement-explain-analyze.md).\n\nThe methods above are different in the following aspects:\n\n- The slow log records the duration of almost all stages of a SQL execution, from parsing to returning results, and is relatively comprehensive (you can query and analyze the slow log in TiDB Dashboard in an intuitive way).\n- By executing `EXPLAIN ANALYZE`, you can learn the time consumption of each operator in an actual SQL execution. The results have more detailed statistics of the execution duration.\n\nIn summary, the slow log and `EXPLAIN ANALYZE` statements help you determine the SQL query is slow in which component (TiDB or TiKV) at which stage of the execution. Therefore, you can accurately identify the performance bottleneck of the query.\n\nIn addition, since v4.0.3, the `Plan` field in the slow log also includes the SQL execution information, which is the result of `EXPLAIN ANALYZE`. So you can find all information of SQL duration in the slow log.\n\n## Analyze system issues\n\nSystem issues can be divided into the following types according to different execution stages of a SQL statement:\n\n1. TiKV is slow in data processing. For example, the TiKV coprocessor processes data slowly.\n2. TiDB is slow in execution. For example, a `Join` operator processes data slowly.\n3. Other key stages are slow. 
For example, getting the timestamp takes a long time.\n\nFor each slow query, first determine to which type the query belongs, and then analyze it in detail.\n\n### TiKV is slow in data processing\n\nIf TiKV is slow in data processing, you can easily identify it in the result of `EXPLAIN ANALYZE`. In the following example, `StreamAgg_8` and `TableFullScan_15`, two `tikv-task`s (as indicated by `cop[tikv]` in the `task` column), take `170ms` to execute. After subtracting `170ms`, the execution time of TiDB operators account for a very small proportion of the total execution time. This indicates that the bottleneck is in TiKV.\n\n```sql\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------+\n| id                         | estRows | actRows | task      | access object | execution info                                                               | operator info                   | memory    | disk |\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------+\n| StreamAgg_16               | 1.00    | 1       | root      |               | time:170.08572ms, loops:2                                                     | funcs:count(Column#5)->Column#3 | 372 Bytes | N/A  |\n| └─TableReader_17           | 1.00    | 1       | root      |               | time:170.080369ms, loops:2, rpc num: 1, rpc time:17.023347ms, proc keys:28672 | data:StreamAgg_8                | 202 Bytes | N/A  |\n|   └─StreamAgg_8            | 1.00    | 1       | cop[tikv] |               | time:170ms, loops:29                                                          | funcs:count(1)->Column#5        | N/A       | N/A  |\n|     └─TableFullScan_15     | 7.00    | 28672   | cop[tikv] | table:t       | 
time:170ms, loops:29                                                          | keep order:false, stats:pseudo  | N/A       | N/A  |\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------\n```\n\nIn addition, the `Cop_process` and `Cop_wait` fields in the slow log can also help your analysis. In the following example, the total duration of the query is around `180.85ms`, and the largest `coptask` takes `171ms`. This indicates that the bottleneck of this query is on the TiKV side.\n\nFor the description of each field in the slow log, see [fields description](/identify-slow-queries.md#fields-description).\n\n```log\n# Query_time: 0.18085\n...\n# Num_cop_tasks: 1\n# Cop_process: Avg_time: 170ms P90_time: 170ms Max_time: 170ms Max_addr: 10.6.131.78\n# Cop_wait: Avg_time: 1ms P90_time: 1ms Max_time: 1ms Max_Addr: 10.6.131.78\n```\n\nAfter identifying that TiKV is the bottleneck, you can find out the cause as described in the following sections.\n\n#### TiKV instance is busy\n\nDuring the execution of a SQL statement, TiDB might fetch data from multiple TiKV instances. If one TiKV instance responds slowly, the overall SQL execution speed is slowed down.\n\nThe `Cop_wait` field in the slow log can help you determine this cause.\n\n```log\n# Cop_wait: Avg_time: 1ms P90_time: 2ms Max_time: 110ms Max_Addr: 10.6.131.78\n```\n\nThe log above shows that a `cop-task` sent to the `10.6.131.78` instance waits `110ms` before being executed. It indicates that this instance is busy. You can check the CPU monitoring of that time to confirm the cause.\n\n#### Too many outdated keys\n\nA TiKV instance has much outdated data, which needs to be cleaned up for data scan. This impacts the processing speed.\n\nCheck `Total_keys` and `Processed_keys`. 
If they are greatly different, the TiKV instance has too many keys of the older versions.\n\n```\n...\n# Total_keys: 2215187529 Processed_keys: 1108056368\n...\n```\n\n### Other key stages are slow\n\n#### Slow in getting timestamps\n\nYou can compare `Wait_TS` and `Query_time` in the slow log. The timestamps are prefetched, so generally `Wait_TS` should be low.\n\n```\n# Query_time: 0.0300000\n...\n# Wait_TS: 0.02500000\n```\n\n#### Outdated Region information\n\nRegion information on the TiDB side might be outdated. In this situation, TiKV might return the `regionMiss` error. Then TiDB gets the Region information from PD again, which is reflected in the `Cop_backoff` information. Both the failed times and the total duration are recorded.\n\n```\n# Cop_backoff_regionMiss_total_times: 200 Cop_backoff_regionMiss_total_time: 0.2 Cop_backoff_regionMiss_max_time: 0.2 Cop_backoff_regionMiss_max_addr: 127.0.0.1 Cop_backoff_regionMiss_avg_time: 0.2 Cop_backoff_regionMiss_p90_time: 0.2\n# Cop_backoff_rpcPD_total_times: 200 Cop_backoff_rpcPD_total_time: 0.2 Cop_backoff_rpcPD_max_time: 0.2 Cop_backoff_rpcPD_max_addr: 127.0.0.1 Cop_backoff_rpcPD_avg_time: 0.2 Cop_backoff_rpcPD_p90_time: 0.2\n```\n\n#### Subqueries are executed in advance\n\nFor statements with non-correlated subqueries, the subquery part might be executed in advance. For example, in `select * from t1 where a = (select max(a) from t2)`, the `select max(a) from t2` part might be executed in advance in the optimization stage. 
The result of `EXPLAIN ANALYZE` does not show the duration of this type of subqueries.\n\n```sql\nmysql> explain analyze select count(*) from t where a=(select max(t1.a) from t t1, t t2 where t1.a=t2.a);\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n| id                           | estRows  | actRows | task      | access object | execution info           | operator info                    | memory    | disk |\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n| StreamAgg_59                 | 1.00     | 1       | root      |               | time:4.69267ms, loops:2  | funcs:count(Column#10)->Column#8 | 372 Bytes | N/A  |\n| └─TableReader_60             | 1.00     | 1       | root      |               | time:4.690428ms, loops:2 | data:StreamAgg_48                | 141 Bytes | N/A  |\n|   └─StreamAgg_48             | 1.00     |         | cop[tikv] |               | time:0ns, loops:0        | funcs:count(1)->Column#10        | N/A       | N/A  |\n|     └─Selection_58           | 16384.00 |         | cop[tikv] |               | time:0ns, loops:0        | eq(test.t.a, 1)                  | N/A       | N/A  |\n|       └─TableFullScan_57     | 16384.00 | -1      | cop[tikv] | table:t       | time:0s, loops:0         | keep order:false                 | N/A       | N/A  |\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n5 rows in set (7.77 sec)\n```\n\nBut you can identify this type of subquery execution in the slow log:\n\n```\n# Query_time: 7.770634843\n...\n# Rewrite_time: 7.765673663 Preproc_subqueries: 1 Preproc_subqueries_time: 7.765231874\n```\n\nFrom log record above, you can see that a subquery is executed in advance and 
takes `7.76s`.\n\n### TiDB is slow in execution\n\nAssume that the execution plan in TiDB is correct but the execution is slow. To solve this type of issue, you can adjust parameters or use the hint according to the result of `EXPLAIN ANALYZE` for the SQL statement.\n\nIf the execution plan is incorrect, see the [Analyze optimizer issues](#analyze-optimizer-issues) section.\n\n#### Low concurrency\n\nIf the bottleneck is in the operator with concurrency, speed up the execution by adjusting the concurrency. For example:\n\n```sql\nmysql> explain analyze select sum(t1.a) from t t1, t t2 where t1.a=t2.a;\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n| id                               | estRows      | actRows   | task      | access object | execution info                                                                      | operator info                                  | memory           | disk    |\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n| HashAgg_11                       | 1.00         | 1         | root      |               | time:9.666832189s, loops:2, PartialConcurrency:4, FinalConcurrency:4                | funcs:sum(Column#6)->Column#5                  | 322.125 KB       | N/A     |\n| └─Projection_24                  | 268435456.00 | 268435456 | root      |               | time:9.098644711s, loops:262145, Concurrency:4                                      | cast(test.t.a, decimal(65,0) BINARY)->Column#6 | 199 KB           | N/A     |\n|   └─HashJoin_14                  | 268435456.00 | 268435456 | root      |               | 
time:6.616773501s, loops:262145, Concurrency:5, probe collision:0, build:881.404µs  | inner join, equal:[eq(test.t.a, test.t.a)]     | 131.75 KB        | 0 Bytes |\n|     ├─TableReader_21(Build)      | 16384.00     | 16384     | root      |               | time:6.553717ms, loops:17                                                           | data:Selection_20                              | 33.6318359375 KB | N/A     |\n|     │ └─Selection_20             | 16384.00     |           | cop[tikv] |               | time:0ns, loops:0                                                                   | not(isnull(test.t.a))                          | N/A              | N/A     |\n|     │   └─TableFullScan_19       | 16384.00     | -1        | cop[tikv] | table:t2      | time:0s, loops:0                                                                    | keep order:false                               | N/A              | N/A     |\n|     └─TableReader_18(Probe)      | 16384.00     | 16384     | root      |               | time:6.880923ms, loops:17                                                           | data:Selection_17                              | 33.6318359375 KB | N/A     |\n|       └─Selection_17             | 16384.00     |           | cop[tikv] |               | time:0ns, loops:0                                                                   | not(isnull(test.t.a))                          | N/A              | N/A     |\n|         └─TableFullScan_16       | 16384.00     | -1        | cop[tikv] | table:t1      | time:0s, loops:0                                                                    | keep order:false                               | N/A              | N/A     |\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n9 rows in set (9.67 sec)\n```\n\nAs 
shown above, `HashJoin_14` and `Projection_24` consume much of the execution time. Consider increasing their concurrency using SQL variables to speed up execution.\n\nAll system variables are documented in [system-variables](/system-variables.md). To increase the concurrency of `HashJoin_14`, you can modify the `tidb_hash_join_concurrency` system variable.\n\n#### Data is spilled to disk\n\nAnother cause of slow execution is disk spill that occurs during execution if the memory limit is reached. You can find out this cause in the execution plan and the slow log:\n\n```sql\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n| id                      | estRows   | actRows | task      | access object | execution info               | operator info        | memory                | disk           |\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n| Sort_4                  | 462144.00 | 462144  | root      |               | time:2.02848898s, loops:453  | test.t.a             | 149.68795776367188 MB | 219.3203125 MB |\n| └─TableReader_8         | 462144.00 | 462144  | root      |               | time:616.211272ms, loops:453 | data:TableFullScan_7 | 197.49601364135742 MB | N/A            |\n|   └─TableFullScan_7     | 462144.00 | -1      | cop[tikv] | table:t       | time:0s, loops:0             | keep order:false     | N/A                   | N/A            |\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n```\n\n```\n...\n# Disk_max: 229974016\n...\n```\n\n#### Join operations with Cartesian product\n\nJoin operations with Cartesian product generate data volume as large as `left child row count * right 
child row count`. This is inefficient and should be avoided.\n\nThis type of join operations is marked `CARTESIAN` in the execution plan. For example:\n\n```sql\nmysql> explain select * from t t1, t t2 where t1.a>t2.a;\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n| id                           | estRows     | task      | access object | operator info                                           |\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n| HashJoin_8                   | 99800100.00 | root      |               | CARTESIAN inner join, other cond:gt(test.t.a, test.t.a) |\n| ├─TableReader_15(Build)      | 9990.00     | root      |               | data:Selection_14                                       |\n| │ └─Selection_14             | 9990.00     | cop[tikv] |               | not(isnull(test.t.a))                                   |\n| │   └─TableFullScan_13       | 10000.00    | cop[tikv] | table:t2      | keep order:false, stats:pseudo                          |\n| └─TableReader_12(Probe)      | 9990.00     | root      |               | data:Selection_11                                       |\n|   └─Selection_11             | 9990.00     | cop[tikv] |               | not(isnull(test.t.a))                                   |\n|     └─TableFullScan_10       | 10000.00    | cop[tikv] | table:t1      | keep order:false, stats:pseudo                          |\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n```\n\n## Analyze optimizer issues\n\nTo analyze optimizer issues, you need to determine whether the execution plan is reasonable or not. 
You need to have some understanding of the optimization process and each operator.\n\nFor the following examples, assume that the table schema is `create table t (id int, a int, b int, c int, primary key(id), key(a), key(b, c))`.\n\n1. `select * from t`: There is no filter condition and a full table scan is performed. So the `TableFullScan` operator is used to read data.\n2. `select a from t where a=2`: There is a filter condition and only the index columns are read, so the `IndexReader` operator is used to read data.\n3. `select * from t where a=2`: There is a filter condition for `a` but the `a` index cannot fully cover the data to be read, so the `IndexLookup` operator is used.\n4. `select b from t where c=3`: Without the prefix condition, the multi-column index cannot be used. So the `IndexFullScan` is used.\n5. ...\n\nThe examples above are operators used for data reads. For more operators, see [Understand TiDB Execution Plan](/explain-overview.md).\n\nIn addition, reading [SQL Tuning Overview](/sql-tuning-overview.md) helps you better understand the TiDB optimizer and determine whether the execution plan is reasonable or not.\n\nMost optimizer issues are explained in [SQL Tuning Overview](/sql-tuning-overview.md). For the solutions, see the following documents:\n\n1. [Wrong Index Solution](/wrong-index-solution.md)\n2. [Wrong join order](/join-reorder.md)\n3. [Expressions are not pushed down](/blocklist-control-plan.md)\n"
  },
  {
    "path": "core/examples/quickstart/fixtures/tidb-overview.md",
    "content": "---\ntitle: What is TiDB Self-Managed\nsummary: Learn about the key features and usage scenarios of TiDB.\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\n---\n\n# What is TiDB Self-Managed\n\n<!-- Localization note for TiDB:\n\n- English: use distributed SQL, and start to emphasize HTAP\n- Chinese: can keep \"NewSQL\" and emphasize one-stop real-time HTAP (\"一栈式实时 HTAP\")\n- Japanese: use NewSQL because it is well-recognized\n\n-->\n\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, \"Ti\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\n\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\n\nThe following video introduces key features of TiDB.\n\n<iframe width=\"600\" height=\"450\" src=\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\" title=\"Why TiDB?\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\" allowfullscreen></iframe>\n\n## Key features\n\n- **Easy horizontal scaling**\n\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. 
The scaling process is transparent to application operations and maintenance staff.\n\n- **Financial-grade high availability**\n\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log. A transaction can only be committed when data has been successfully written into the majority of replicas. This guarantees strong consistency and availability when a minority of replicas go down. You can configure the geographic location and number of replicas as needed to meet different disaster tolerance levels.\n\n- **Real-time HTAP**\n\n  TiDB provides two storage engines: [TiKV](/tikv-overview.md), a row-based storage engine, and [TiFlash](/tiflash/tiflash-overview.md), a columnar storage engine. \n\n  TiFlash uses the Multi-Raft Learner protocol to replicate data from TiKV in real time, ensuring consistent data between the TiKV row-based storage engine and the TiFlash columnar storage engine. TiKV and TiFlash can be deployed on different machines as needed to solve the problem of HTAP resource isolation.\n\n- **Cloud-native distributed database**\n\n  TiDB is a distributed database designed for the cloud, providing flexible scalability, reliability, and security on the cloud platform. Users can elastically scale TiDB to meet the requirements of their changing workloads. In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. [TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. 
[TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\n\n- **Compatible with the MySQL protocol and MySQL ecosystem**\n\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code. In addition, TiDB provides a series of [data migration tools](/ecosystem-tool-user-guide.md) to help easily migrate application data into TiDB.\n\n## See also\n\n- [TiDB Architecture](/tidb-architecture.md)\n- [TiDB Storage](/tidb-storage.md)\n- [TiDB Computing](/tidb-computing.md)\n- [TiDB Scheduling](/tidb-scheduling.md)\n"
  },
  {
    "path": "core/examples/quickstart/quickstart.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"b7e2255151321350\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Autoflow\\n\",\n    \"\\n\",\n    \"Autoflow is a RAG framework supported:\\n\",\n    \"\\n\",\n    \"- Vector Search Based RAG\\n\",\n    \"- Knowledge Graph Based RAG (aka. GraphRAG)\\n\",\n    \"- Knowledge Base and Document Management\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f4c3f49f\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Installation\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"id\": \"7bbed79850462cfe\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:23.724019Z\",\n     \"start_time\": \"2025-04-15T01:31:22.872381Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Note: you may need to restart the kernel to use updated packages.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"%pip install -q autoflow-ai==0.0.2.dev5 ipywidgets\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"0b6d5be6\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Prerequisites\\n\",\n    \"\\n\",\n    \"- Go [tidbcloud.com](https://tidbcloud.com/) or using [tiup playground](https://docs.pingcap.com/tidb/stable/tiup-playground/) to create a free TiDB database cluster.\\n\",\n    \"- Go [OpenAI platform](https://platform.openai.com/api-keys) to create your API key.\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"66ea056f213efcae\",\n   \"metadata\": {},\n   \"source\": [\n    \"#### For Jupyter Notebook\\n\",\n    \"\\n\",\n    \"Configuration can be provided through environment variables, or using `.env`:\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"id\": \"7de9ab2c65f1880e\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": 
\"2025-04-15T01:31:23.740325Z\",\n     \"start_time\": \"2025-04-15T01:31:23.729076Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"%%bash\\n\",\n    \"\\n\",\n    \"# Check if the .env file is existing.\\n\",\n    \"if [ -f .env ]; then\\n\",\n    \"    exit 0\\n\",\n    \"fi\\n\",\n    \"\\n\",\n    \"# Create .env file with your configuration.\\n\",\n    \"cat > .env <<EOF\\n\",\n    \"TIDB_HOST=localhost\\n\",\n    \"TIDB_PORT=4000\\n\",\n    \"TIDB_USERNAME=root\\n\",\n    \"TIDB_PASSWORD=\\n\",\n    \"TIDB_DATABASE=test\\n\",\n    \"OPENAI_API_KEY='your_openai_api_key'\\n\",\n    \"EOF\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"id\": \"b6cdb4d5\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:23.826376Z\",\n     \"start_time\": \"2025-04-15T01:31:23.820064Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 3,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"import os\\n\",\n    \"import dotenv\\n\",\n    \"\\n\",\n    \"dotenv.load_dotenv()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"id\": \"6ddd696d4e1d9c78\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:24.238783Z\",\n     \"start_time\": \"2025-04-15T01:31:23.836248Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"from pandas import DataFrame\\n\",\n    \"from pandas import set_option\\n\",\n    \"\\n\",\n    \"set_option(\\\"display.max_colwidth\\\", None)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f8897854c897bf17\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Quickstart\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"a38fde21\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Init Autoflow\"\n   ]\n  },\n  {\n   \"cell_type\": 
\"code\",\n   \"execution_count\": 5,\n   \"id\": \"84f43a00\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:26.467866Z\",\n     \"start_time\": \"2025-04-15T01:31:24.243001Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"from autoflow import Autoflow\\n\",\n    \"from autoflow.configs.db import DatabaseConfig\\n\",\n    \"from autoflow.configs.main import Config\\n\",\n    \"\\n\",\n    \"af = Autoflow.from_config(\\n\",\n    \"    config=Config(\\n\",\n    \"        db=DatabaseConfig(\\n\",\n    \"            host=os.getenv(\\\"TIDB_HOST\\\"),\\n\",\n    \"            port=int(os.getenv(\\\"TIDB_PORT\\\")),\\n\",\n    \"            username=os.getenv(\\\"TIDB_USERNAME\\\"),\\n\",\n    \"            password=os.getenv(\\\"TIDB_PASSWORD\\\"),\\n\",\n    \"            database=os.getenv(\\\"TIDB_DATABASE\\\"),\\n\",\n    \"            enable_ssl=False,\\n\",\n    \"        )\\n\",\n    \"    )\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"5afea9b7\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Create knowledge base\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"id\": \"9e1ff63c\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:26.512535Z\",\n     \"start_time\": \"2025-04-15T01:31:26.475394Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"application/json\": {\n       \"class_name\": \"KnowledgeBase\",\n       \"description\": \"This is a knowledge base for testing\",\n       \"index_methods\": [\n        \"VECTOR_SEARCH\",\n        \"KNOWLEDGE_GRAPH\"\n       ],\n       \"name\": \"New KB\",\n       \"namespace\": \"quickstart\"\n      },\n      \"text/plain\": [\n       \"<IPython.core.display.JSON object>\"\n      ]\n     },\n     \"execution_count\": 6,\n     \"metadata\": {\n      \"application/json\": {\n       \"expanded\": false,\n       \"root\": \"root\"\n      }\n     
},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"from autoflow.configs.knowledge_base import IndexMethod\\n\",\n    \"from autoflow.models.llms import LLM\\n\",\n    \"from autoflow.models.embedding_models import EmbeddingModel\\n\",\n    \"from IPython.display import JSON\\n\",\n    \"\\n\",\n    \"llm = LLM(\\\"gpt-4o-mini\\\")\\n\",\n    \"embed_model = EmbeddingModel(\\\"text-embedding-3-small\\\")\\n\",\n    \"\\n\",\n    \"kb = af.create_knowledge_base(\\n\",\n    \"    namespace=\\\"quickstart\\\",\\n\",\n    \"    name=\\\"New KB\\\",\\n\",\n    \"    description=\\\"This is a knowledge base for testing\\\",\\n\",\n    \"    index_methods=[IndexMethod.VECTOR_SEARCH, IndexMethod.KNOWLEDGE_GRAPH],\\n\",\n    \"    llm=llm,\\n\",\n    \"    embedding_model=embed_model,\\n\",\n    \")\\n\",\n    \"JSON(kb.model_dump())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"id\": \"7c217f7f8cf956d8\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:26.736798Z\",\n     \"start_time\": \"2025-04-15T01:31:26.516452Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# Reset all the data of knowledge base.\\n\",\n    \"kb.reset()\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"d4ac8a82485d4232\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Custom Chunker\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 8,\n   \"id\": \"cddfe61c16ee934e\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:26.744529Z\",\n     \"start_time\": \"2025-04-15T01:31:26.740821Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"from autoflow.chunkers.text import TextChunker\\n\",\n    \"from autoflow.configs.chunkers.text import TextChunkerConfig\\n\",\n    \"\\n\",\n    \"text_chunker = TextChunker(config=TextChunkerConfig(chunk_size=256, chunk_overlap=20))\"\n   ]\n  },\n  {\n   
\"cell_type\": \"markdown\",\n   \"id\": \"4cfc2d80\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Import documents from files\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 9,\n   \"id\": \"f729326f\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:57.520138Z\",\n     \"start_time\": \"2025-04-15T01:31:26.749953Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>id</th>\\n\",\n       \"      <th>text</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>0196384b-e01b-7e87-8ae3-ceaebc5ea4f0</td>\\n\",\n       \"      <td>---\\\\ntitle: What is TiDB Self-Managed\\\\nsummary: Learn about the key features and usage scenarios of TiDB.\\\\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\\\\n---\\\\n\\\\n# What is TiDB Self-Managed\\\\n\\\\n&lt;!-- Localization note for TiDB:\\\\n\\\\n- English: use distributed SQL, and start to emphasize HTAP\\\\n- Chinese: can keep \\\"NewSQL\\\" and emphasize one-stop real-time HTAP (\\\"一栈式实时 HTAP\\\")\\\\n- Japanese: use NewSQL because it is well-recognized\\\\n\\\\n--&gt;\\\\n\\\\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, 
\\\"Ti\\\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>0196384b-e01b-7ebc-9a82-f51dac13ba5c</td>\\n\",\n       \"      <td>TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\\\\n\\\\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\\\\n\\\\nThe following video introduces key features of TiDB.\\\\n\\\\n&lt;iframe width=\\\"600\\\" height=\\\"450\\\" src=\\\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\\\" title=\\\"Why TiDB?\\\" frameborder=\\\"0\\\" allow=\\\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\\\" allowfullscreen&gt;&lt;/iframe&gt;\\\\n\\\\n## Key features\\\\n\\\\n- **Easy horizontal scaling**\\\\n\\\\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. 
The scaling process is transparent to application operations and maintenance staff.\\\\n\\\\n- **Financial-grade high availability**\\\\n\\\\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>0196384b-e01b-7ed4-bf69-f65c861aedf7</td>\\n\",\n       \"      <td>A transaction can only be committed when data has been successfully written into the majority of replicas. This guarantees strong consistency and availability when a minority of replicas go down. You can configure the geographic location and number of replicas as needed to meet different disaster tolerance levels.\\\\n\\\\n- **Real-time HTAP**\\\\n\\\\n  TiDB provides two storage engines: [TiKV](/tikv-overview.md), a row-based storage engine, and [TiFlash](/tiflash/tiflash-overview.md), a columnar storage engine. \\\\n\\\\n  TiFlash uses the Multi-Raft Learner protocol to replicate data from TiKV in real time, ensuring consistent data between the TiKV row-based storage engine and the TiFlash columnar storage engine. TiKV and TiFlash can be deployed on different machines as needed to solve the problem of HTAP resource isolation.\\\\n\\\\n- **Cloud-native distributed database**\\\\n\\\\n  TiDB is a distributed database designed for the cloud, providing flexible scalability, reliability, and security on the cloud platform. Users can elastically scale TiDB to meet the requirements of their changing workloads.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>3</th>\\n\",\n       \"      <td>0196384b-e01b-7ee1-91f0-d57434e5d74d</td>\\n\",\n       \"      <td>Users can elastically scale TiDB to meet the requirements of their changing workloads. In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. 
[TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. [TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\\\\n\\\\n- **Compatible with the MySQL protocol and MySQL ecosystem**\\\\n\\\\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>4</th>\\n\",\n       \"      <td>0196384b-e01b-7eed-a470-c9bcb5a7eccc</td>\\n\",\n       \"      <td>In addition, TiDB provides a series of [data migration tools](/ecosystem-tool-user-guide.md) to help easily migrate application data into TiDB.\\\\n\\\\n## See also\\\\n\\\\n- [TiDB Architecture](/tidb-architecture.md)\\\\n- [TiDB Storage](/tidb-storage.md)\\\\n- [TiDB Computing](/tidb-computing.md)\\\\n- [TiDB Scheduling](/tidb-scheduling.md)</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"                                     id  \\\\\\n\",\n       \"0  0196384b-e01b-7e87-8ae3-ceaebc5ea4f0   \\n\",\n       \"1  0196384b-e01b-7ebc-9a82-f51dac13ba5c   \\n\",\n       \"2  0196384b-e01b-7ed4-bf69-f65c861aedf7   \\n\",\n       \"3  0196384b-e01b-7ee1-91f0-d57434e5d74d   \\n\",\n       \"4  0196384b-e01b-7eed-a470-c9bcb5a7eccc   \\n\",\n       \"\\n\",\n       \"                                                                             
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                           text  \\n\",\n       \"0                                                                                          ---\\\\ntitle: What is TiDB Self-Managed\\\\nsummary: Learn about the key features and usage scenarios of TiDB.\\\\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\\\\n---\\\\n\\\\n# What is TiDB Self-Managed\\\\n\\\\n<!-- Localization note for TiDB:\\\\n\\\\n- English: use distributed SQL, and start to emphasize HTAP\\\\n- Chinese: can keep \\\"NewSQL\\\" and emphasize one-stop real-time HTAP (\\\"一栈式实时 HTAP\\\")\\\\n- Japanese: use NewSQL because it is well-recognized\\\\n\\\\n-->\\\\n\\\\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, \\\"Ti\\\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. 
It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.  \\n\",\n       \"1  TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\\\\n\\\\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\\\\n\\\\nThe following video introduces key features of TiDB.\\\\n\\\\n<iframe width=\\\"600\\\" height=\\\"450\\\" src=\\\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\\\" title=\\\"Why TiDB?\\\" frameborder=\\\"0\\\" allow=\\\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\\\" allowfullscreen></iframe>\\\\n\\\\n## Key features\\\\n\\\\n- **Easy horizontal scaling**\\\\n\\\\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. The scaling process is transparent to application operations and maintenance staff.\\\\n\\\\n- **Financial-grade high availability**\\\\n\\\\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log.  \\n\",\n       \"2                                                                       A transaction can only be committed when data has been successfully written into the majority of replicas. This guarantees strong consistency and availability when a minority of replicas go down. 
You can configure the geographic location and number of replicas as needed to meet different disaster tolerance levels.\\\\n\\\\n- **Real-time HTAP**\\\\n\\\\n  TiDB provides two storage engines: [TiKV](/tikv-overview.md), a row-based storage engine, and [TiFlash](/tiflash/tiflash-overview.md), a columnar storage engine. \\\\n\\\\n  TiFlash uses the Multi-Raft Learner protocol to replicate data from TiKV in real time, ensuring consistent data between the TiKV row-based storage engine and the TiFlash columnar storage engine. TiKV and TiFlash can be deployed on different machines as needed to solve the problem of HTAP resource isolation.\\\\n\\\\n- **Cloud-native distributed database**\\\\n\\\\n  TiDB is a distributed database designed for the cloud, providing flexible scalability, reliability, and security on the cloud platform. Users can elastically scale TiDB to meet the requirements of their changing workloads.  \\n\",\n       \"3                                                             Users can elastically scale TiDB to meet the requirements of their changing workloads. In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. [TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. [TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\\\\n\\\\n- **Compatible with the MySQL protocol and MySQL ecosystem**\\\\n\\\\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. 
To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code.  \\n\",\n       \"4                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                       In addition, TiDB provides a series of [data migration tools](/ecosystem-tool-user-guide.md) to help easily migrate application data into TiDB.\\\\n\\\\n## See also\\\\n\\\\n- [TiDB Architecture](/tidb-architecture.md)\\\\n- [TiDB Storage](/tidb-storage.md)\\\\n- [TiDB Computing](/tidb-computing.md)\\\\n- [TiDB Scheduling](/tidb-scheduling.md)  \"\n      ]\n     },\n     \"execution_count\": 9,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"docs = kb.add(\\\"./fixtures/tidb-overview.md\\\", chunker=text_chunker)\\n\",\n    \"\\n\",\n    \"DataFrame(\\n\",\n    \"    [(c.id, c.text) for c in docs[0].chunks],\\n\",\n    \"    columns=[\\\"id\\\", \\\"text\\\"],\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"84fd9b606e6a11a5\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Search Documents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 10,\n   \"id\": 
\"259ad7a9\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:57.567655Z\",\n     \"start_time\": \"2025-04-15T01:31:57.543046Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>text</th>\\n\",\n       \"      <th>score</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>---\\\\ntitle: What is TiDB Self-Managed\\\\nsummary: Learn about the key features and usage scenarios of TiDB.\\\\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\\\\n---\\\\n\\\\n# What is TiDB Self-Managed\\\\n\\\\n&lt;!-- Localization note for TiDB:\\\\n\\\\n- English: use distributed SQL, and start to emphasize HTAP\\\\n- Chinese: can keep \\\"NewSQL\\\" and emphasize one-stop real-time HTAP (\\\"一栈式实时 HTAP\\\")\\\\n- Japanese: use NewSQL because it is well-recognized\\\\n\\\\n--&gt;\\\\n\\\\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, \\\"Ti\\\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. 
The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.</td>\\n\",\n       \"      <td>0.726047</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\\\\n\\\\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\\\\n\\\\nThe following video introduces key features of TiDB.\\\\n\\\\n&lt;iframe width=\\\"600\\\" height=\\\"450\\\" src=\\\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\\\" title=\\\"Why TiDB?\\\" frameborder=\\\"0\\\" allow=\\\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\\\" allowfullscreen&gt;&lt;/iframe&gt;\\\\n\\\\n## Key features\\\\n\\\\n- **Easy horizontal scaling**\\\\n\\\\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. The scaling process is transparent to application operations and maintenance staff.\\\\n\\\\n- **Financial-grade high availability**\\\\n\\\\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log.</td>\\n\",\n       \"      <td>0.669803</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>Users can elastically scale TiDB to meet the requirements of their changing workloads. 
In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. [TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. [TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\\\\n\\\\n- **Compatible with the MySQL protocol and MySQL ecosystem**\\\\n\\\\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code.</td>\\n\",\n       \"      <td>0.656657</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                           
                                                                                                                                                                                                                                                                                                                                                                                             text  \\\\\\n\",\n       \"0                                                                                          ---\\\\ntitle: What is TiDB Self-Managed\\\\nsummary: Learn about the key features and usage scenarios of TiDB.\\\\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\\\\n---\\\\n\\\\n# What is TiDB Self-Managed\\\\n\\\\n<!-- Localization note for TiDB:\\\\n\\\\n- English: use distributed SQL, and start to emphasize HTAP\\\\n- Chinese: can keep \\\"NewSQL\\\" and emphasize one-stop real-time HTAP (\\\"一栈式实时 HTAP\\\")\\\\n- Japanese: use NewSQL because it is well-recognized\\\\n\\\\n-->\\\\n\\\\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, \\\"Ti\\\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.   \\n\",\n       \"1  TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\\\\n\\\\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. 
With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\\\\n\\\\nThe following video introduces key features of TiDB.\\\\n\\\\n<iframe width=\\\"600\\\" height=\\\"450\\\" src=\\\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\\\" title=\\\"Why TiDB?\\\" frameborder=\\\"0\\\" allow=\\\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\\\" allowfullscreen></iframe>\\\\n\\\\n## Key features\\\\n\\\\n- **Easy horizontal scaling**\\\\n\\\\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. The scaling process is transparent to application operations and maintenance staff.\\\\n\\\\n- **Financial-grade high availability**\\\\n\\\\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log.   \\n\",\n       \"2                                                             Users can elastically scale TiDB to meet the requirements of their changing workloads. In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. [TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. 
[TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\\\\n\\\\n- **Compatible with the MySQL protocol and MySQL ecosystem**\\\\n\\\\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code.   \\n\",\n       \"\\n\",\n       \"      score  \\n\",\n       \"0  0.726047  \\n\",\n       \"1  0.669803  \\n\",\n       \"2  0.656657  \"\n      ]\n     },\n     \"execution_count\": 10,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"result = kb.search_documents(\\n\",\n    \"    query=\\\"What is TiDB?\\\",\\n\",\n    \"    top_k=3,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"DataFrame(\\n\",\n    \"    [(c.text, c.score) for c in result.chunks],\\n\",\n    \"    columns=[\\\"text\\\", \\\"score\\\"],\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"f2a0de8057cdf16b\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Search Knowledge Graph\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 11,\n   \"id\": \"6fc5bc93\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-15T01:31:57.746250Z\",\n     \"start_time\": \"2025-04-15T01:31:57.605589Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n 
      \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>source_entity</th>\\n\",\n       \"      <th>relation</th>\\n\",\n       \"      <th>target_entity</th>\\n\",\n       \"      <th>score</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Storage is an essential part of how TiDB manages data.</td>\\n\",\n       \"      <td>TiDB Storage</td>\\n\",\n       \"      <td>6.546173</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides TiKV as a row-based storage engine for data storage.</td>\\n\",\n       \"      <td>TiKV</td>\\n\",\n       \"      <td>6.256637</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Computing describes the processing capabilities of the TiDB database.</td>\\n\",\n       \"      <td>TiDB Computing</td>\\n\",\n       \"      <td>5.975210</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>3</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB has key features that include easy horizontal scaling and financial-grade high availability.</td>\\n\",\n       \"      <td>Key features of TiDB</td>\\n\",\n       \"      <td>5.648048</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>4</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides strong consistency, ensuring that all transactions are 
immediately visible to users.</td>\\n\",\n       \"      <td>Strong Consistency</td>\\n\",\n       \"      <td>5.378570</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>5</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Architecture is a key component of the TiDB database system.</td>\\n\",\n       \"      <td>TiDB Architecture</td>\\n\",\n       \"      <td>5.374958</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>6</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB is designed for high availability, ensuring operational continuity even during failures.</td>\\n\",\n       \"      <td>High Availability</td>\\n\",\n       \"      <td>5.220304</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>7</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB is MySQL compatible, enabling users to utilize existing MySQL applications with minimal adjustments.</td>\\n\",\n       \"      <td>MySQL Compatibility</td>\\n\",\n       \"      <td>5.137373</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>8</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB offers OLAP services, enabling fast and interactive access to data for analytical purposes.</td>\\n\",\n       \"      <td>OLAP (Online Analytical Processing)</td>\\n\",\n       \"      <td>5.021178</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>9</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides a series of data migration tools to help easily migrate application data into TiDB.</td>\\n\",\n       \"      <td>data migration tools</td>\\n\",\n       \"      <td>5.002972</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>10</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides TiFlash as a columnar storage 
engine that replicates data from TiKV.</td>\\n\",\n       \"      <td>TiFlash</td>\\n\",\n       \"      <td>4.756693</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>11</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB features horizontal scalability, allowing it to expand capacity by adding more machines to the cluster.</td>\\n\",\n       \"      <td>Horizontal Scalability</td>\\n\",\n       \"      <td>4.715631</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>12</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides OLTP services, facilitating transaction-oriented applications for data entry and retrieval.</td>\\n\",\n       \"      <td>OLTP (Online Transactional Processing)</td>\\n\",\n       \"      <td>4.683033</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>13</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB supports Hybrid Transactional and Analytical Processing (HTAP) workloads, allowing for simultaneous handling of transactional and analytical tasks.</td>\\n\",\n       \"      <td>Hybrid Transactional and Analytical Processing (HTAP)</td>\\n\",\n       \"      <td>4.431353</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>14</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Self-Managed is a product option of TiDB that provides users with the ability to deploy and manage TiDB on their own infrastructure.</td>\\n\",\n       \"      <td>TiDB Self-Managed</td>\\n\",\n       \"      <td>4.256526</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>15</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Scheduling is involved in managing the execution of tasks within the TiDB database.</td>\\n\",\n       \"      <td>TiDB Scheduling</td>\\n\",\n       \"      <td>4.245480</td>\\n\",\n       \" 
   </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>16</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB is designed as a cloud-native distributed database providing flexible scalability and reliability.</td>\\n\",\n       \"      <td>Cloud-native</td>\\n\",\n       \"      <td>4.219989</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>17</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB uses the Multi-Raft protocol to ensure high availability by managing transaction logs across multiple replicas.</td>\\n\",\n       \"      <td>Multi-Raft protocol</td>\\n\",\n       \"      <td>3.848345</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>18</th>\\n\",\n       \"      <td>Key features of TiDB</td>\\n\",\n       \"      <td>Another key feature of TiDB is financial-grade high availability, which is achieved through data replication.</td>\\n\",\n       \"      <td>Financial-grade high availability</td>\\n\",\n       \"      <td>3.475075</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>19</th>\\n\",\n       \"      <td>Key features of TiDB</td>\\n\",\n       \"      <td>One of the key features of TiDB is easy horizontal scaling, which allows for flexible resource management.</td>\\n\",\n       \"      <td>Easy horizontal scaling</td>\\n\",\n       \"      <td>3.398429</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"           source_entity  \\\\\\n\",\n       \"0                   TiDB   \\n\",\n       \"1                   TiDB   \\n\",\n       \"2                   TiDB   \\n\",\n       \"3                   TiDB   \\n\",\n       \"4                   TiDB   \\n\",\n       \"5                   TiDB   \\n\",\n       \"6                   TiDB   \\n\",\n       \"7                   TiDB   \\n\",\n       \"8                
   TiDB   \\n\",\n       \"9                   TiDB   \\n\",\n       \"10                  TiDB   \\n\",\n       \"11                  TiDB   \\n\",\n       \"12                  TiDB   \\n\",\n       \"13                  TiDB   \\n\",\n       \"14                  TiDB   \\n\",\n       \"15                  TiDB   \\n\",\n       \"16                  TiDB   \\n\",\n       \"17                  TiDB   \\n\",\n       \"18  Key features of TiDB   \\n\",\n       \"19  Key features of TiDB   \\n\",\n       \"\\n\",\n       \"                                                                                                                                                    relation  \\\\\\n\",\n       \"0                                                                                                TiDB Storage is an essential part of how TiDB manages data.   \\n\",\n       \"1                                                                                         TiDB provides TiKV as a row-based storage engine for data storage.   \\n\",\n       \"2                                                                                 TiDB Computing describes the processing capabilities of the TiDB database.   \\n\",\n       \"3                                                          TiDB has key features that include easy horizontal scaling and financial-grade high availability.   \\n\",\n       \"4                                                         TiDB provides strong consistency, ensuring that all transactions are immediately visible to users.   \\n\",\n       \"5                                                                                          TiDB Architecture is a key component of the TiDB database system.   \\n\",\n       \"6                                                              TiDB is designed for high availability, ensuring operational continuity even during failures.   
\\n\",\n       \"7                                                  TiDB is MySQL compatible, enabling users to utilize existing MySQL applications with minimal adjustments.   \\n\",\n       \"8                                                           TiDB offers OLAP services, enabling fast and interactive access to data for analytical purposes.   \\n\",\n       \"9                                                          TiDB provides a series of data migration tools to help easily migrate application data into TiDB.   \\n\",\n       \"10                                                                        TiDB provides TiFlash as a columnar storage engine that replicates data from TiKV.   \\n\",\n       \"11                                              TiDB features horizontal scalability, allowing it to expand capacity by adding more machines to the cluster.   \\n\",\n       \"12                                                 TiDB provides OLTP services, facilitating transaction-oriented applications for data entry and retrieval.   \\n\",\n       \"13  TiDB supports Hybrid Transactional and Analytical Processing (HTAP) workloads, allowing for simultaneous handling of transactional and analytical tasks.   \\n\",\n       \"14                 TiDB Self-Managed is a product option of TiDB that provides users with the ability to deploy and manage TiDB on their own infrastructure.   \\n\",\n       \"15                                                                  TiDB Scheduling is involved in managing the execution of tasks within the TiDB database.   \\n\",\n       \"16                                                   TiDB is designed as a cloud-native distributed database providing flexible scalability and reliability.   \\n\",\n       \"17                                      TiDB uses the Multi-Raft protocol to ensure high availability by managing transaction logs across multiple replicas.   
\\n\",\n       \"18                                             Another key feature of TiDB is financial-grade high availability, which is achieved through data replication.   \\n\",\n       \"19                                                One of the key features of TiDB is easy horizontal scaling, which allows for flexible resource management.   \\n\",\n       \"\\n\",\n       \"                                            target_entity     score  \\n\",\n       \"0                                            TiDB Storage  6.546173  \\n\",\n       \"1                                                    TiKV  6.256637  \\n\",\n       \"2                                          TiDB Computing  5.975210  \\n\",\n       \"3                                    Key features of TiDB  5.648048  \\n\",\n       \"4                                      Strong Consistency  5.378570  \\n\",\n       \"5                                       TiDB Architecture  5.374958  \\n\",\n       \"6                                       High Availability  5.220304  \\n\",\n       \"7                                     MySQL Compatibility  5.137373  \\n\",\n       \"8                     OLAP (Online Analytical Processing)  5.021178  \\n\",\n       \"9                                    data migration tools  5.002972  \\n\",\n       \"10                                                TiFlash  4.756693  \\n\",\n       \"11                                 Horizontal Scalability  4.715631  \\n\",\n       \"12                 OLTP (Online Transactional Processing)  4.683033  \\n\",\n       \"13  Hybrid Transactional and Analytical Processing (HTAP)  4.431353  \\n\",\n       \"14                                      TiDB Self-Managed  4.256526  \\n\",\n       \"15                                        TiDB Scheduling  4.245480  \\n\",\n       \"16                                           Cloud-native  4.219989  \\n\",\n       \"17                                    Multi-Raft protocol  
3.848345  \\n\",\n       \"18                      Financial-grade high availability  3.475075  \\n\",\n       \"19                                Easy horizontal scaling  3.398429  \"\n      ]\n     },\n     \"execution_count\": 11,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"kg = kb.search_knowledge_graph(\\n\",\n    \"    query=\\\"What is TiDB?\\\",\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"# Notice: score is the result of a weighted formula\\n\",\n    \"\\n\",\n    \"DataFrame(\\n\",\n    \"    [\\n\",\n    \"        (r.source_entity.name, r.description, r.target_entity.name, r.score)\\n\",\n    \"        for r in kg.relationships\\n\",\n    \"    ],\\n\",\n    \"    columns=[\\\"source_entity\\\", \\\"relation\\\", \\\"target_entity\\\", \\\"score\\\"],\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"c1f1920c\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Ask question\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 12,\n   \"id\": \"54bab89a\",\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"TiDB is an open-source distributed SQL database designed to support Hybrid Transactional and Analytical Processing (HTAP) workloads. It is compatible with MySQL, allowing users to leverage existing MySQL applications and tools with minimal changes. TiDB features several key attributes:\\n\",\n       \"\\n\",\n       \"1. **High Availability**: TiDB is designed to ensure operational continuity even during failures, providing financial-grade high availability by storing data in multiple replicas.\\n\",\n       \"\\n\",\n       \"2. **Strong Consistency**: It guarantees that all transactions are immediately visible to all users, ensuring a reliable and predictable database experience.\\n\",\n       \"\\n\",\n       \"3. 
**Horizontal Scalability**: TiDB allows for easy horizontal scaling by separating computing from storage, enabling users to scale out or scale in their computing or storage capacity online as needed.\\n\",\n       \"\\n\",\n       \"4. **Support for OLTP and OLAP**: TiDB provides a one-stop database solution that covers Online Transactional Processing (OLTP), Online Analytical Processing (OLAP), and HTAP services, making it suitable for various use cases that require high availability and strong consistency with large-scale data.\\n\",\n       \"\\n\",\n       \"5. **Cloud-native Architecture**: TiDB is designed for cloud environments, offering flexible scalability, reliability, and security on cloud platforms.\\n\",\n       \"\\n\",\n       \"6. **Data Migration Tools**: TiDB includes a series of data migration tools to facilitate the easy transfer of application data into the TiDB database.\\n\",\n       \"\\n\",\n       \"7. **Storage Engines**: TiDB utilizes two storage engines: TiKV, a row-based storage engine, and TiFlash, a columnar storage engine that replicates data from TiKV in real time.\\n\",\n       \"\\n\",\n       \"Overall, TiDB aims to provide users with a robust and flexible database solution that can adapt to changing workloads and requirements.\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"execution_count\": 12,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"from IPython.display import Markdown\\n\",\n    \"\\n\",\n    \"res = kb.ask(\\\"What is TiDB?\\\")\\n\",\n    \"Markdown(res.message.content)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"id\": \"3ed0149fb5a9e1cb\",\n   \"metadata\": {},\n   \"source\": [\n    \"### Reset the KnowledgeBase\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 13,\n   \"id\": \"4303dc61b3f073f1\",\n   \"metadata\": {\n    \"ExecuteTime\": {\n     
\"end_time\": \"2025-04-15T01:31:57.880832Z\",\n     \"start_time\": \"2025-04-15T01:31:57.878931Z\"\n    }\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# kb.reset()\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \".venv\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.12.4\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n}\n"
  },
  {
    "path": "core/examples/streamlit/README.md",
    "content": "# Streamlit Examples\n\nThis project demonstrates how to use AutoFlow as a Retrieval-Augmented Generation (RAG) framework and Streamlit as the web framework for building a knowledge search application.\n\n## Prerequisites\n- **Python 3.12.4** (Check the version specified in `.python-version`). You can use `pyenv` to manage your Python versions.\n- **macOS users:** Ensure `mysqlclient` is installed.\n\n## Installation and Usage\n\n**Step 1: Install Dependencies**\n\nCreate a virtual environment and install the required packages:\n\n```bash\npython -m venv .venv\nsource .venv/bin/activate\npip install -r reqs.txt\n```\n\n**Step 2: Run the Streamlit App**\nStart the application with:\n\n```bash\nstreamlit run build-knowledge-search-with-autoflow-and-streamlit.py\n```\n\n**Step 3: Open in Browser**\n\nOnce the app is running, open http://localhost:8501 in your browser and follow these steps:\n\n\n1. Enter your [OpenAI API key](https://platform.openai.com/api-keys) in left sidebar\n2. Enter your TiDB Cloud connection string in the left sidebar. Use the SQLAlchemy format ( `mysql+pymysql://root@localhost:4000/test`) To obtain your connection string, go to the TiDB Cloud console, click **Connect**, and select `SQLAlchemy` from the **Connect With** dropdown.\n3. Press **Enter** to start initialization (this may take a few minutes).\n4. Once initialization completes, upload a file to build the knowledge base. Then enter your queries to retrieve answers from the newly generated knowledge base.\n"
  },
  {
    "path": "core/examples/streamlit/build-knowledge-search-with-autoflow-and-streamlit.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport os\nfrom uuid import UUID\n\nimport streamlit as st\nfrom sqlalchemy import create_engine\nfrom autoflow import Autoflow\nfrom autoflow.types import IndexMethod\nfrom autoflow.llms.chat_models import ChatModel\nfrom autoflow.llms.embeddings import EmbeddingModel\nfrom llama_index.core.llms import ChatMessage\n\nst.set_page_config(\n    page_title=\"Demo of Autoflow and Streamlit\", page_icon=\"📖\", layout=\"wide\"\n)\nst.header(\"📖 Knowledge base app built with Autoflow and Streamlit\")\n\nwith st.sidebar:\n    st.markdown(\n        \"## How to use\\n\"\n        \"1. Enter your [OpenAI API key](https://platform.openai.com/account/api-keys) below 🔑\\n\"  # noqa: E501\n        \"2. Enter your [TiDB Cloud](https://tidbcloud.com) database connection URL below 🔗\\n\"\n        \"3. Upload a pdf, docx, or txt file 📄\\n\"\n        \"4. Ask a question about the document 💬\\n\"\n    )\n    openai_api_key_input = st.text_input(\n        \"OpenAI API Key\",\n        type=\"password\",\n        placeholder=\"Paste your OpenAI API key here (sk-...)\",\n        help=\"You can get your API key from https://platform.openai.com/account/api-keys.\",  # noqa: E501\n        value=os.environ.get(\"OPENAI_API_KEY\", None)\n        or st.session_state.get(\"OPENAI_API_KEY\", \"\"),\n    )\n    database_url_input = st.text_input(\n        \"Database URL\",\n        type=\"password\",\n        placeholder=\"e.g. 
mysql+pymysql://root@localhost:4000/test\",\n        autocomplete=\"off\",\n        help=\"You can get your database URL from https://tidbcloud.com\",\n        value=os.environ.get(\"DATABASE_URL\", None)\n        or \"mysql+pymysql://root@localhost:4000/test\"\n        or st.session_state.get(\"DATABASE_URL\", \"\"),\n    )\n    st.session_state[\"OPENAI_API_KEY\"] = openai_api_key_input\n    st.session_state[\"DATABASE_URL\"] = database_url_input\n\nopenai_api_key = st.session_state.get(\"OPENAI_API_KEY\")\ndatabase_url = st.session_state.get(\"DATABASE_URL\")\n\nif not openai_api_key or not database_url:\n    st.error(\"Please enter your OpenAI API key and TiDB Cloud connection string.\")\n    st.stop()\n\naf = Autoflow(create_engine(database_url))\nchat_model = ChatModel(\"gpt-4o-mini\", api_key=openai_api_key)\nembedding_model = EmbeddingModel(\n    model_name=\"text-embedding-3-small\",\n    dimensions=1536,\n    api_key=openai_api_key,\n)\nkb = af.create_knowledge_base(\n    id=UUID(\n        \"655b6cf3-8b30-4839-ba8b-5ed3c502f30e\"\n    ),  # For not creating a new KB every time\n    name=\"New KB\",\n    description=\"This is a knowledge base for testing\",\n    index_methods=[IndexMethod.VECTOR_SEARCH, IndexMethod.KNOWLEDGE_GRAPH],\n    chat_model=chat_model,\n    embedding_model=embedding_model,\n)\n\nwith st.form(key=\"file_upload_form\"):\n    uploaded_file = st.file_uploader(\n        \"Upload a .pdf, .docx, .md or .txt file\",\n        type=[\"pdf\", \"docx\", \"txt\", \"md\"],\n        help=\"Scanned documents are not supported yet!\",\n    )\n    upload = st.form_submit_button(\"Upload\")\n    if upload:\n        if not uploaded_file:\n            st.error(\"Please upload a valid file.\")\n            st.stop()\n        file_path = f\"/tmp/{uploaded_file.name}\"\n        with st.spinner(\n            \"Indexing document... 
This may take a while ⏳(import time; time.sleep(3))\"\n        ):\n            with open(file_path, \"wb\") as f:\n                f.write(uploaded_file.getvalue())\n            kb.add(file_path)\n            import time\n\n            time.sleep(3)\n\nfor line in [\"generated\", \"past\", \"corpus\"]:\n    if line not in st.session_state:\n        st.session_state[line] = []\n\nfor o in [\"kg\"]:\n    if o not in st.session_state:\n        st.session_state[o] = None\n\n\ndef on_submit():\n    user_input = st.session_state.user_input\n    if user_input:\n        result = kb.search_documents(query=user_input, similarity_top_k=3)\n        st.session_state[\"corpus\"] = result.chunks\n        kg = kb.search_knowledge_graph(query=user_input)\n        st.session_state[\"kg\"] = kg\n        messages = [\n            ChatMessage(\n                role=\"system\",\n                content=\"Here are some relevant documents about your query:\\n\\n\"\n                + \"\\n\".join(c.chunk.text for c in result.chunks),\n            ),\n            ChatMessage(\n                role=\"user\",\n                content=user_input + \"\\n(in markdown, removed unused breaklines)\",\n            ),\n        ]\n        resp = chat_model.chat(messages)\n        st.session_state.past.append(user_input)\n        st.session_state.generated.append(str(resp.message))\n\n\nchat_section, corpus_section = st.columns(2)\nwith chat_section:\n    st.markdown(\"##### Chats\")\n    chat_placeholder = st.empty()\n    with chat_placeholder.container():\n        for i in range(len(st.session_state[\"generated\"])):\n            with st.chat_message(\"user\"):\n                st.write(st.session_state[\"past\"][i])\n            with st.chat_message(\"assistant\"):\n                st.write(st.session_state[\"generated\"][i])\n\n    with st.container():\n        st.chat_input(\n            \"Input your question about this document here.\",\n            key=\"user_input\",\n            
on_submit=on_submit,\n        )\n\nwith corpus_section:\n    st.markdown(\"##### Vector Search Results\")\n    corpus_placeholder = st.empty()\n    with corpus_placeholder.container():\n        [c.chunk for c in st.session_state[\"corpus\"]] if st.session_state[\n            \"corpus\"\n        ] else \"Please input a query left.\"\n\n    st.markdown(\"##### Knowledge Graph Search Results\")\n    kg_placeholder = st.empty()\n    with kg_placeholder.container():\n        kg = st.session_state[\"kg\"]\n        [\n            r.rag_description for r in kg.relationships\n        ] if kg else \"Please input a query left.\"\n"
  },
  {
    "path": "core/examples/streamlit/reqs.txt",
    "content": "autoflow-ai==0.0.1.dev25\nautoflow-ai[experiment]\npymysql\nstreamlit\nhttpx[socks]"
  },
  {
    "path": "core/experimental/README.md",
    "content": "# Autoflow Experimental\n\nThis folder contains experimental features that are not yet ready for production use.\n"
  },
  {
    "path": "core/experimental/kg_extraction/extract_graph.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import pandas as pd\\n\",\n    \"\\n\",\n    \"pd.set_option(\\\"max_colwidth\\\", None)\\n\",\n    \"pd.set_option(\\\"max_seq_items\\\", None)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from dotenv import load_dotenv\\n\",\n    \"\\n\",\n    \"from autoflow.knowledge_graph.programs.extract_graph import (\\n\",\n    \"    KnowledgeGraphExtractor,\\n\",\n    \"    KnowledgeGraphOutput,\\n\",\n    \")\\n\",\n    \"from autoflow.models.llms import LLM\\n\",\n    \"from autoflow.models.llms.dspy import get_dspy_lm_by_llm\\n\",\n    \"\\n\",\n    \"load_dotenv()\\n\",\n    \"\\n\",\n    \"llm = LLM()\\n\",\n    \"dspy_lm = get_dspy_lm_by_llm(llm)\\n\",\n    \"extractor = KnowledgeGraphExtractor(dspy_lm=dspy_lm)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pathlib import Path\\n\",\n    \"\\n\",\n    \"text = Path(\\\"../tests/fixtures/tidb-overview.md\\\").read_text()\\n\",\n    \"actual_knowledge = extractor.forward(text)\\n\",\n    \"df = actual_knowledge.to_pandas()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table 
border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>name</th>\\n\",\n       \"      <th>description</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>TiDB Self-Managed</td>\\n\",\n       \"      <td>TiDB Self-Managed is a product option of TiDB that allows users or organizations to deploy and manage TiDB on their own infrastructure with complete flexibility. 
Users can enjoy the power of open-source distributed SQL while retaining full control over their environment.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>Hybrid Transactional and Analytical Processing (HTAP)</td>\\n\",\n       \"      <td>HTAP is a database processing technique that enables both transactional and analytical workloads to be performed on the same database system without the need for separate systems or data duplication.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>3</th>\\n\",\n       \"      <td>OLTP</td>\\n\",\n       \"      <td>OLTP (Online Transactional Processing) is a class of systems that facilitate and manage transaction-oriented applications, typically for data entry and retrieval transactions in a database.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>4</th>\\n\",\n       \"      <td>OLAP</td>\\n\",\n       \"      <td>OLAP (Online Analytical Processing) is a category of software tools that provide analysis of data for business decisions. 
It allows users to analyze multidimensional data interactively from multiple perspectives.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>5</th>\\n\",\n       \"      <td>TiKV</td>\\n\",\n       \"      <td>TiKV is a row-based storage engine provided by TiDB for storing data in a distributed manner.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>6</th>\\n\",\n       \"      <td>TiFlash</td>\\n\",\n       \"      <td>TiFlash is a columnar storage engine provided by TiDB for real-time replication of data from TiKV, ensuring consistent data between row-based and columnar storage.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>7</th>\\n\",\n       \"      <td>TiDB Operator</td>\\n\",\n       \"      <td>TiDB Operator is a tool that helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>8</th>\\n\",\n       \"      <td>TiDB Cloud</td>\\n\",\n       \"      <td>TiDB Cloud is a fully-managed TiDB service that provides the easiest, most economical, and most resilient way to deploy and run TiDB clusters in the cloud.</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>9</th>\\n\",\n       \"      <td>MySQL Protocol</td>\\n\",\n       \"      <td>The MySQL Protocol is the protocol used by MySQL database systems for communication between clients and servers. 
TiDB is compatible with the MySQL protocol, allowing seamless integration with the MySQL ecosystem.</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"                                                    name  \\\\\\n\",\n       \"0                                                   TiDB   \\n\",\n       \"1                                      TiDB Self-Managed   \\n\",\n       \"2  Hybrid Transactional and Analytical Processing (HTAP)   \\n\",\n       \"3                                                   OLTP   \\n\",\n       \"4                                                   OLAP   \\n\",\n       \"5                                                   TiKV   \\n\",\n       \"6                                                TiFlash   \\n\",\n       \"7                                          TiDB Operator   \\n\",\n       \"8                                             TiDB Cloud   \\n\",\n       \"9                                         MySQL Protocol   \\n\",\n       \"\\n\",\n       \"                                                                                                                                                                                                                                                                                                                           description  \\n\",\n       \"0  TiDB is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.  
\\n\",\n       \"1                                                      TiDB Self-Managed is a product option of TiDB that allows users or organizations to deploy and manage TiDB on their own infrastructure with complete flexibility. Users can enjoy the power of open-source distributed SQL while retaining full control over their environment.  \\n\",\n       \"2                                                                                                                              HTAP is a database processing technique that enables both transactional and analytical workloads to be performed on the same database system without the need for separate systems or data duplication.  \\n\",\n       \"3                                                                                                                                        OLTP (Online Transactional Processing) is a class of systems that facilitate and manage transaction-oriented applications, typically for data entry and retrieval transactions in a database.  \\n\",\n       \"4                                                                                                                 OLAP (Online Analytical Processing) is a category of software tools that provide analysis of data for business decisions. It allows users to analyze multidimensional data interactively from multiple perspectives.  \\n\",\n       \"5                                                                                                                                                                                                                                        TiKV is a row-based storage engine provided by TiDB for storing data in a distributed manner.  
\\n\",\n       \"6                                                                                                                                                                  TiFlash is a columnar storage engine provided by TiDB for real-time replication of data from TiKV, ensuring consistent data between row-based and columnar storage.  \\n\",\n       \"7                                                                                                                                  TiDB Operator is a tool that helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes.  \\n\",\n       \"8                                                                                                                                                                          TiDB Cloud is a fully-managed TiDB service that provides the easiest, most economical, and most resilient way to deploy and run TiDB clusters in the cloud.  \\n\",\n       \"9                                                                                                                 The MySQL Protocol is the protocol used by MySQL database systems for communication between clients and servers. TiDB is compatible with the MySQL protocol, allowing seamless integration with the MySQL ecosystem.  
\"\n      ]\n     },\n     \"execution_count\": 4,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"df[\\\"entities\\\"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/html\": [\n       \"<div>\\n\",\n       \"<style scoped>\\n\",\n       \"    .dataframe tbody tr th:only-of-type {\\n\",\n       \"        vertical-align: middle;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe tbody tr th {\\n\",\n       \"        vertical-align: top;\\n\",\n       \"    }\\n\",\n       \"\\n\",\n       \"    .dataframe thead th {\\n\",\n       \"        text-align: right;\\n\",\n       \"    }\\n\",\n       \"</style>\\n\",\n       \"<table border=\\\"1\\\" class=\\\"dataframe\\\">\\n\",\n       \"  <thead>\\n\",\n       \"    <tr style=\\\"text-align: right;\\\">\\n\",\n       \"      <th></th>\\n\",\n       \"      <th>source_entity</th>\\n\",\n       \"      <th>relationship_desc</th>\\n\",\n       \"      <th>target_entity</th>\\n\",\n       \"    </tr>\\n\",\n       \"  </thead>\\n\",\n       \"  <tbody>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>0</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB Self-Managed is a product option of TiDB that allows users or organizations to deploy and manage TiDB on their own infrastructure with complete flexibility.</td>\\n\",\n       \"      <td>TiDB Self-Managed</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>1</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.</td>\\n\",\n       \"      <td>OLTP</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>2</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides users with a one-stop 
database solution covering OLTP, OLAP, and HTAP services.</td>\\n\",\n       \"      <td>OLAP</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>3</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.</td>\\n\",\n       \"      <td>Hybrid Transactional and Analytical Processing (HTAP)</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>4</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides two storage engines: TiKV, a row-based storage engine, and TiFlash, a columnar storage engine.</td>\\n\",\n       \"      <td>TiKV</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>5</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB provides two storage engines: TiKV, a row-based storage engine, and TiFlash, a columnar storage engine.</td>\\n\",\n       \"      <td>TiFlash</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>6</th>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"      <td>TiDB is compatible with the MySQL protocol, allowing seamless integration with the MySQL ecosystem.</td>\\n\",\n       \"      <td>MySQL Protocol</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>7</th>\\n\",\n       \"      <td>TiDB Cloud</td>\\n\",\n       \"      <td>TiDB Cloud is a fully-managed TiDB service that provides the easiest, most economical, and most resilient way to deploy and run TiDB clusters in the cloud.</td>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"    </tr>\\n\",\n       \"    <tr>\\n\",\n       \"      <th>8</th>\\n\",\n       \"      <td>TiDB Operator</td>\\n\",\n       \"      <td>TiDB Operator helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed 
Kubernetes.</td>\\n\",\n       \"      <td>TiDB</td>\\n\",\n       \"    </tr>\\n\",\n       \"  </tbody>\\n\",\n       \"</table>\\n\",\n       \"</div>\"\n      ],\n      \"text/plain\": [\n       \"   source_entity  \\\\\\n\",\n       \"0           TiDB   \\n\",\n       \"1           TiDB   \\n\",\n       \"2           TiDB   \\n\",\n       \"3           TiDB   \\n\",\n       \"4           TiDB   \\n\",\n       \"5           TiDB   \\n\",\n       \"6           TiDB   \\n\",\n       \"7     TiDB Cloud   \\n\",\n       \"8  TiDB Operator   \\n\",\n       \"\\n\",\n       \"                                                                                                                                                                      relationship_desc  \\\\\\n\",\n       \"0                     TiDB Self-Managed is a product option of TiDB that allows users or organizations to deploy and manage TiDB on their own infrastructure with complete flexibility.   \\n\",\n       \"1                                                                                         TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.   \\n\",\n       \"2                                                                                         TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.   \\n\",\n       \"3                                                                                         TiDB provides users with a one-stop database solution covering OLTP, OLAP, and HTAP services.   \\n\",\n       \"4                                                                          TiDB provides two storage engines: TiKV, a row-based storage engine, and TiFlash, a columnar storage engine.   \\n\",\n       \"5                                                                          TiDB provides two storage engines: TiKV, a row-based storage engine, and TiFlash, a columnar storage engine.   
\\n\",\n       \"6                                                                                   TiDB is compatible with the MySQL protocol, allowing seamless integration with the MySQL ecosystem.   \\n\",\n       \"7                           TiDB Cloud is a fully-managed TiDB service that provides the easiest, most economical, and most resilient way to deploy and run TiDB clusters in the cloud.   \\n\",\n       \"8  TiDB Operator helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes.   \\n\",\n       \"\\n\",\n       \"                                           target_entity  \\n\",\n       \"0                                      TiDB Self-Managed  \\n\",\n       \"1                                                   OLTP  \\n\",\n       \"2                                                   OLAP  \\n\",\n       \"3  Hybrid Transactional and Analytical Processing (HTAP)  \\n\",\n       \"4                                                   TiKV  \\n\",\n       \"5                                                TiFlash  \\n\",\n       \"6                                         MySQL Protocol  \\n\",\n       \"7                                                   TiDB  \\n\",\n       \"8                                                   TiDB  \"\n      ]\n     },\n     \"execution_count\": 5,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"df[\\\"relationships\\\"]\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"expected_knowledge = KnowledgeGraphOutput(\\n\",\n    \"    entities=[\\n\",\n    \"        {\\\"name\\\": \\\"TiDB\\\", \\\"description\\\": \\\"A distributed SQL database\\\"},\\n\",\n    \"        {\\\"name\\\": \\\"TiKV\\\", \\\"description\\\": \\\"TiKV is a row-based storage 
engine.\\\"},\\n\",\n    \"        {\\\"name\\\": \\\"TiFlash\\\", \\\"description\\\": \\\"TiFlash is a column-based storage engine.\\\"},\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"OLTP\\\",\\n\",\n    \"            \\\"description\\\": \\\"OLTP is a type of database that is optimized for transaction processing.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"OLAP\\\",\\n\",\n    \"            \\\"description\\\": \\\"OLAP is a type of database that is optimized for analytics.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"HTAP\\\",\\n\",\n    \"            \\\"description\\\": \\\"HTAP is a type of database that is optimized for both transaction processing and analytics.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"TiDB Self-Managed\\\",\\n\",\n    \"            \\\"description\\\": \\\"TiDB Self-Managed is a product option of TiDB.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"TiDB Cloud\\\",\\n\",\n    \"            \\\"description\\\": \\\"TiDB Cloud is a fully-managed TiDB service.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"name\\\": \\\"TiDB Operator\\\",\\n\",\n    \"            \\\"description\\\": \\\"TiDB Operator is a tool that helps manage TiDB on Kubernetes.\\\",\\n\",\n    \"        },\\n\",\n    \"    ],\\n\",\n    \"    relationships=[\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"TiKV\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB uses TiKV as its storage engine\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"TiFlash\\\",\\n\",\n    \"            \\\"relationship_desc\\\": 
\\\"TiDB uses TiFlash as its analytics engine\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"OLTP\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB supports OLTP workloads\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"OLAP\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB supports OLAP workloads\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"HTAP\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB supports HTAP workloads\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB Self-Managed\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB Self-Managed is a product option of TiDB\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB Cloud\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"TiDB\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB Cloud is a fully-managed TiDB service.\\\",\\n\",\n    \"        },\\n\",\n    \"        {\\n\",\n    \"            \\\"source_entity\\\": \\\"TiDB Operator\\\",\\n\",\n    \"            \\\"target_entity\\\": \\\"TiDB Cloud\\\",\\n\",\n    \"            \\\"relationship_desc\\\": \\\"TiDB Operator is a tool that helps manage TiDB on Kubernetes.\\\",\\n\",\n    \"        },\\n\",\n    \"    ],\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"metadata\": {\n    \"ExecuteTime\": {\n     \"end_time\": \"2025-04-11T09:00:13.095249Z\",\n     \"start_time\": 
\"2025-04-11T09:00:11.025732Z\"\n    }\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"0.6111111111111112\"\n      ]\n     },\n     \"execution_count\": 7,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"from autoflow.knowledge_graph.programs.eval_graph import KnowledgeGraphEvaluator\\n\",\n    \"\\n\",\n    \"evaluator = KnowledgeGraphEvaluator(dspy_lm=dspy_lm)\\n\",\n    \"\\n\",\n    \"evaluation_result = evaluator.forward(expected_knowledge, actual_knowledge)\\n\",\n    \"evaluation_result.score\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 9,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"ename\": \"ValueError\",\n     \"evalue\": \"\\\"EntityOutput\\\" object has no field \\\"metadata\\\"\",\n     \"output_type\": \"error\",\n     \"traceback\": [\n      \"\\u001b[31m---------------------------------------------------------------------------\\u001b[39m\",\n      \"\\u001b[31mValueError\\u001b[39m                                Traceback (most recent call last)\",\n      \"\\u001b[36mCell\\u001b[39m\\u001b[36m \\u001b[39m\\u001b[32mIn[9]\\u001b[39m\\u001b[32m, line 4\\u001b[39m\\n\\u001b[32m      1\\u001b[39m \\u001b[38;5;28;01mfrom\\u001b[39;00m\\u001b[38;5;250m \\u001b[39m\\u001b[34;01mautoflow\\u001b[39;00m\\u001b[34;01m.\\u001b[39;00m\\u001b[34;01mknowledge_graph\\u001b[39;00m\\u001b[34;01m.\\u001b[39;00m\\u001b[34;01mprograms\\u001b[39;00m\\u001b[34;01m.\\u001b[39;00m\\u001b[34;01mextract_covariates\\u001b[39;00m\\u001b[38;5;250m \\u001b[39m\\u001b[38;5;28;01mimport\\u001b[39;00m EntityCovariateExtractor\\n\\u001b[32m      3\\u001b[39m metadata_extractor = EntityCovariateExtractor(dspy_lm=dspy_lm)\\n\\u001b[32m----> \\u001b[39m\\u001b[32m4\\u001b[39m actual_knowledge.entities = 
\\u001b[43mmetadata_extractor\\u001b[49m\\u001b[43m.\\u001b[49m\\u001b[43mforward\\u001b[49m\\u001b[43m(\\u001b[49m\\u001b[43mtext\\u001b[49m\\u001b[43m,\\u001b[49m\\u001b[43m \\u001b[49m\\u001b[43mactual_knowledge\\u001b[49m\\u001b[43m.\\u001b[49m\\u001b[43mentities\\u001b[49m\\u001b[43m)\\u001b[49m\\n\\u001b[32m      6\\u001b[39m \\u001b[38;5;28;01mfor\\u001b[39;00m entity \\u001b[38;5;129;01min\\u001b[39;00m actual_knowledge.entities:\\n\\u001b[32m      7\\u001b[39m     \\u001b[38;5;28mprint\\u001b[39m(entity.name, entity.metadata)\\n\",\n      \"\\u001b[36mFile \\u001b[39m\\u001b[32m~/Projects/autoflow.ai/core/autoflow/knowledge_graph/programs/extract_covariates.py:81\\u001b[39m, in \\u001b[36mEntityCovariateExtractor.forward\\u001b[39m\\u001b[34m(self, text, entities)\\u001b[39m\\n\\u001b[32m     78\\u001b[39m \\u001b[38;5;28;01mfor\\u001b[39;00m entity \\u001b[38;5;129;01min\\u001b[39;00m entities:\\n\\u001b[32m     79\\u001b[39m     \\u001b[38;5;28;01mif\\u001b[39;00m entity.name \\u001b[38;5;129;01min\\u001b[39;00m output_entity_map:\\n\\u001b[32m     80\\u001b[39m         \\u001b[38;5;66;03m# Update the covariates in the metadata of the entity.\\u001b[39;00m\\n\\u001b[32m---> \\u001b[39m\\u001b[32m81\\u001b[39m         \\u001b[43mentity\\u001b[49m\\u001b[43m.\\u001b[49m\\u001b[43mmetadata\\u001b[49m = output_entity_map[entity.name].covariates\\n\\u001b[32m     83\\u001b[39m \\u001b[38;5;28;01mreturn\\u001b[39;00m entities\\n\",\n      \"\\u001b[36mFile \\u001b[39m\\u001b[32m~/Projects/autoflow.ai/core/.venv/lib/python3.12/site-packages/pydantic/main.py:925\\u001b[39m, in \\u001b[36mBaseModel.__setattr__\\u001b[39m\\u001b[34m(self, name, value)\\u001b[39m\\n\\u001b[32m    922\\u001b[39m     \\u001b[38;5;28mself\\u001b[39m.__pydantic_validator__.validate_assignment(\\u001b[38;5;28mself\\u001b[39m, name, value)\\n\\u001b[32m    923\\u001b[39m \\u001b[38;5;28;01melif\\u001b[39;00m 
\\u001b[38;5;28mself\\u001b[39m.model_config.get(\\u001b[33m'\\u001b[39m\\u001b[33mextra\\u001b[39m\\u001b[33m'\\u001b[39m) != \\u001b[33m'\\u001b[39m\\u001b[33mallow\\u001b[39m\\u001b[33m'\\u001b[39m \\u001b[38;5;129;01mand\\u001b[39;00m name \\u001b[38;5;129;01mnot\\u001b[39;00m \\u001b[38;5;129;01min\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m.__pydantic_fields__:\\n\\u001b[32m    924\\u001b[39m     \\u001b[38;5;66;03m# TODO - matching error\\u001b[39;00m\\n\\u001b[32m--> \\u001b[39m\\u001b[32m925\\u001b[39m     \\u001b[38;5;28;01mraise\\u001b[39;00m \\u001b[38;5;167;01mValueError\\u001b[39;00m(\\u001b[33mf\\u001b[39m\\u001b[33m'\\u001b[39m\\u001b[33m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00m\\u001b[38;5;28mself\\u001b[39m.\\u001b[34m__class__\\u001b[39m.\\u001b[34m__name__\\u001b[39m\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[33m\\\"\\u001b[39m\\u001b[33m object has no field \\u001b[39m\\u001b[33m\\\"\\u001b[39m\\u001b[38;5;132;01m{\\u001b[39;00mname\\u001b[38;5;132;01m}\\u001b[39;00m\\u001b[33m\\\"\\u001b[39m\\u001b[33m'\\u001b[39m)\\n\\u001b[32m    926\\u001b[39m \\u001b[38;5;28;01melif\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m.model_config.get(\\u001b[33m'\\u001b[39m\\u001b[33mextra\\u001b[39m\\u001b[33m'\\u001b[39m) == \\u001b[33m'\\u001b[39m\\u001b[33mallow\\u001b[39m\\u001b[33m'\\u001b[39m \\u001b[38;5;129;01mand\\u001b[39;00m name \\u001b[38;5;129;01mnot\\u001b[39;00m \\u001b[38;5;129;01min\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m.__pydantic_fields__:\\n\\u001b[32m    927\\u001b[39m     \\u001b[38;5;28;01mif\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m.model_extra \\u001b[38;5;129;01mand\\u001b[39;00m name \\u001b[38;5;129;01min\\u001b[39;00m \\u001b[38;5;28mself\\u001b[39m.model_extra:\\n\",\n      \"\\u001b[31mValueError\\u001b[39m: \\\"EntityOutput\\\" object has no field \\\"metadata\\\"\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from autoflow.knowledge_graph.programs.extract_covariates import (\\n\",\n    \"    
EntityCovariateExtractor,\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"metadata_extractor = EntityCovariateExtractor(dspy_lm=dspy_lm)\\n\",\n    \"actual_knowledge.entities = metadata_extractor.forward(text, actual_knowledge.entities)\\n\",\n    \"\\n\",\n    \"for entity in actual_knowledge.entities:\\n\",\n    \"    print(entity.name, entity.metadata)\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \".venv\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.12.4\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "core/pyproject.toml",
    "content": "[project]\nname = \"autoflow-ai\"\nversion = \"0.0.2.dev5\"\ndescription = \"Framework to index and search your vector data, make your data ready for AI apps, developed by TiDB.\"\nauthors = [\n    { name = \"Mini256\", email = \"minianter@foxmail.com\" },\n    { name = \"sykp241095\", email = \"sykp241095@gmail.com\" },\n]\nreadme = \"README.md\"\nrequires-python = \">=3.10\"\n\n# Dependencies\n\ndependencies = [\n    \"litellm>=1.77.5\",\n    \"llama-index-core>=0.12.23.post2\",\n    \"llama-index-llms-litellm>=0.3.0\",\n    \"llama-index-readers-file>=0.4.6\",\n    \"deepdiff>=8.2.0\",\n    \"pytidb==0.0.4.dev1\",\n    \"markdownify>=0.13.1\",\n    \"playwright>=1.20.0\",\n    \"dspy>=2.6.6\",\n    \"tokenizers>=0.21.0\",\n    \"mypy>=1.15.0\",\n    \"banks>=2.1.1\",\n]\n\n[dependency-groups]\ndev = [\n    \"pytest>=8.3.4\",\n    \"pytest-dotenv>=0.5.2\",\n    \"pre-commit>=4.1.0\",\n    \"pip>=25.0.1\",\n    \"notebook>=7.3.2\",\n    \"markdownify>=0.13.1\",\n    \"playwright>=1.20.0\",\n    \"dspy>=2.6.6\",\n    \"tokenizers>=0.21.0\",\n    \"llama-index-readers-file>=0.4.6\",\n    \"mypy>=1.15.0\",\n    \"numpy>=1.26.4\",\n    \"ipywidgets>=8.1.6\",\n]\n\n# Check\n\n[tool.pyright]\ninclude = [\"autoflow\"]\n\n# Test\n\n[tool.pytest.ini_options]\nlog_cli = true\nlog_cli_level = \"INFO\"\nlog_cli_format = \"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"\nlog_cli_date_format = \"%Y-%m-%d %H:%M:%S\"\nenv_files = [\"./tests/.env\"]\n\n# Build\n\n[tool.setuptools]\npackages = [\"autoflow\"]\n\n[build-system]\nrequires = [\"hatchling==1.26.3\"]\nbuild-backend = \"hatchling.build\"\n\n[tool.hatch.build]\nexclude = [\n    \"/.*\",\n    \"/dist\",\n    \"/docs\",\n    \"/tests\",\n]\n\n[tool.hatch.metadata]\nallow-direct-references = true\n\n[tool.hatch.build.targets.wheel]\npackages = [\"autoflow\"]\n"
  },
  {
    "path": "core/tests/__init__.py",
    "content": ""
  },
  {
    "path": "core/tests/conftest.py",
    "content": "import logging\nimport os\nfrom pathlib import Path\n\nimport pytest\nfrom dotenv import load_dotenv\n\nfrom pytidb import TiDBClient\nfrom autoflow.models.llms import LLM\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.configs.db import DatabaseConfig\nfrom autoflow.db import get_db_engine_from_config\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"session\", autouse=True)\ndef env():\n    logger.info(f\"Loading environment variables from {Path.cwd() / '.env'}\")\n    load_dotenv()\n\n\n@pytest.fixture(scope=\"session\")\ndef db_engine():\n    config = DatabaseConfig(\n        host=os.getenv(\"TIDB_HOST\"),\n        port=int(os.getenv(\"TIDB_PORT\")),\n        username=os.getenv(\"TIDB_USERNAME\"),\n        password=os.getenv(\"TIDB_PASSWORD\"),\n        database=os.getenv(\"TIDB_DATABASE\"),\n        enable_ssl=False,\n    )\n    return get_db_engine_from_config(config)\n\n\n@pytest.fixture(scope=\"session\")\ndef llm():\n    return LLM(model=\"openai/gpt-4o-mini\")\n\n\n@pytest.fixture(scope=\"session\")\ndef embedding_model():\n    return EmbeddingModel(model_name=\"text-embedding-3-small\")\n\n\n@pytest.fixture(scope=\"session\")\ndef tidb_client(db_engine):\n    return TiDBClient(db_engine=db_engine)\n"
  },
  {
    "path": "core/tests/fixtures/analyze-slow-queries.md",
    "content": "---\ntitle: Analyze Slow Queries\nsummary: Learn how to locate and analyze slow queries.\n---\n\n# Analyze Slow Queries\n\nTo address the issue of slow queries, you need to take the following two steps:\n\n1. Among many queries, identify which type of queries are slow.\n2. Analyze why this type of queries are slow.\n\nYou can easily perform step 1 using the [slow query log](/dashboard/dashboard-slow-query.md) and the [statement summary table](/statement-summary-tables.md) features. It is recommended to use [TiDB Dashboard](/dashboard/dashboard-intro.md), which integrates the two features and directly displays the slow queries in your browser. \n\nThis document focuses on how to perform step 2 - analyze why this type of queries are slow.\n\nGenerally, slow queries have the following major causes:\n\n- Optimizer issues, such as wrong index selected, wrong join type or sequence selected.\n- System issues. All issues not caused by the optimizer are system issues. For example, a busy TiKV instance processes requests slowly; outdated Region information causes slow queries.\n\nIn actual situations, optimizer issues might cause system issues. For example, for a certain type of queries, the optimizer uses a full table scan instead of the index. As a result, the SQL queries consume many resources, which causes the CPU usage of some TiKV instances to soar. This seems like a system issue, but in essence, it is an optimizer issue.\n\nTo identify system issues is relatively simple. To analyze optimizer issues, you need to determine whether the execution plan is reasonable or not. Therefore, it is recommended to analyze slow queries by following these procedures:\n\n1. Identify the performance bottleneck of the query, that is, the time-consuming part of the query process.\n2. Analyze the system issues: analyze the possible causes according to the query bottleneck and the monitoring/log information of that time.\n3. 
Analyze the optimizer issues: analyze whether there is a better execution plan.\n\nThe procedures above are explained in the following sections.\n\n## Identify the performance bottleneck of the query\n\nFirst, you need to have a general understanding of the query process. The key stages of the query execution process in TiDB are illustrated in [TiDB performance map](/media/performance-map.png).\n\nYou can get the duration information using the following methods:\n\n- [Slow log](/identify-slow-queries.md). It is recommended to view the slow log in [TiDB Dashboard](/dashboard/dashboard-overview.md).\n- [`EXPLAIN ANALYZE` statement](/sql-statements/sql-statement-explain-analyze.md).\n\nThe methods above are different in the following aspects:\n\n- The slow log records the duration of almost all stages of a SQL execution, from parsing to returning results, and is relatively comprehensive (you can query and analyze the slow log in TiDB Dashboard in an intuitive way).\n- By executing `EXPLAIN ANALYZE`, you can learn the time consumption of each operator in an actual SQL execution. The results have more detailed statistics of the execution duration.\n\nIn summary, the slow log and `EXPLAIN ANALYZE` statements help you determine the SQL query is slow in which component (TiDB or TiKV) at which stage of the execution. Therefore, you can accurately identify the performance bottleneck of the query.\n\nIn addition, since v4.0.3, the `Plan` field in the slow log also includes the SQL execution information, which is the result of `EXPLAIN ANALYZE`. So you can find all information of SQL duration in the slow log.\n\n## Analyze system issues\n\nSystem issues can be divided into the following types according to different execution stages of a SQL statement:\n\n1. TiKV is slow in data processing. For example, the TiKV coprocessor processes data slowly.\n2. TiDB is slow in execution. For example, a `Join` operator processes data slowly.\n3. Other key stages are slow. 
For example, getting the timestamp takes a long time.\n\nFor each slow query, first determine to which type the query belongs, and then analyze it in detail.\n\n### TiKV is slow in data processing\n\nIf TiKV is slow in data processing, you can easily identify it in the result of `EXPLAIN ANALYZE`. In the following example, `StreamAgg_8` and `TableFullScan_15`, two `tikv-task`s (as indicated by `cop[tikv]` in the `task` column), take `170ms` to execute. After subtracting `170ms`, the execution time of TiDB operators account for a very small proportion of the total execution time. This indicates that the bottleneck is in TiKV.\n\n```sql\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------+\n| id                         | estRows | actRows | task      | access object | execution info                                                               | operator info                   | memory    | disk |\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------+\n| StreamAgg_16               | 1.00    | 1       | root      |               | time:170.08572ms, loops:2                                                     | funcs:count(Column#5)->Column#3 | 372 Bytes | N/A  |\n| └─TableReader_17           | 1.00    | 1       | root      |               | time:170.080369ms, loops:2, rpc num: 1, rpc time:17.023347ms, proc keys:28672 | data:StreamAgg_8                | 202 Bytes | N/A  |\n|   └─StreamAgg_8            | 1.00    | 1       | cop[tikv] |               | time:170ms, loops:29                                                          | funcs:count(1)->Column#5        | N/A       | N/A  |\n|     └─TableFullScan_15     | 7.00    | 28672   | cop[tikv] | table:t       | 
time:170ms, loops:29                                                          | keep order:false, stats:pseudo  | N/A       | N/A  |\n+----------------------------+---------+---------+-----------+---------------+------------------------------------------------------------------------------+---------------------------------+-----------+------\n```\n\nIn addition, the `Cop_process` and `Cop_wait` fields in the slow log can also help your analysis. In the following example, the total duration of the query is around `180.85ms`, and the largest `coptask` takes `171ms`. This indicates that the bottleneck of this query is on the TiKV side.\n\nFor the description of each field in the slow log, see [fields description](/identify-slow-queries.md#fields-description).\n\n```log\n# Query_time: 0.18085\n...\n# Num_cop_tasks: 1\n# Cop_process: Avg_time: 170ms P90_time: 170ms Max_time: 170ms Max_addr: 10.6.131.78\n# Cop_wait: Avg_time: 1ms P90_time: 1ms Max_time: 1ms Max_Addr: 10.6.131.78\n```\n\nAfter identifying that TiKV is the bottleneck, you can find out the cause as described in the following sections.\n\n#### TiKV instance is busy\n\nDuring the execution of a SQL statement, TiDB might fetch data from multiple TiKV instances. If one TiKV instance responds slowly, the overall SQL execution speed is slowed down.\n\nThe `Cop_wait` field in the slow log can help you determine this cause.\n\n```log\n# Cop_wait: Avg_time: 1ms P90_time: 2ms Max_time: 110ms Max_Addr: 10.6.131.78\n```\n\nThe log above shows that a `cop-task` sent to the `10.6.131.78` instance waits `110ms` before being executed. It indicates that this instance is busy. You can check the CPU monitoring of that time to confirm the cause.\n\n#### Too many outdated keys\n\nA TiKV instance has much outdated data, which needs to be cleaned up for data scan. This impacts the processing speed.\n\nCheck `Total_keys` and `Processed_keys`. 
If they are greatly different, the TiKV instance has too many keys of the older versions.\n\n```\n...\n# Total_keys: 2215187529 Processed_keys: 1108056368\n...\n```\n\n### Other key stages are slow\n\n#### Slow in getting timestamps\n\nYou can compare `Wait_TS` and `Query_time` in the slow log. The timestamps are prefetched, so generally `Wait_TS` should be low.\n\n```\n# Query_time: 0.0300000\n...\n# Wait_TS: 0.02500000\n```\n\n#### Outdated Region information\n\nRegion information on the TiDB side might be outdated. In this situation, TiKV might return the `regionMiss` error. Then TiDB gets the Region information from PD again, which is reflected in the `Cop_backoff` information. Both the failed times and the total duration are recorded.\n\n```\n# Cop_backoff_regionMiss_total_times: 200 Cop_backoff_regionMiss_total_time: 0.2 Cop_backoff_regionMiss_max_time: 0.2 Cop_backoff_regionMiss_max_addr: 127.0.0.1 Cop_backoff_regionMiss_avg_time: 0.2 Cop_backoff_regionMiss_p90_time: 0.2\n# Cop_backoff_rpcPD_total_times: 200 Cop_backoff_rpcPD_total_time: 0.2 Cop_backoff_rpcPD_max_time: 0.2 Cop_backoff_rpcPD_max_addr: 127.0.0.1 Cop_backoff_rpcPD_avg_time: 0.2 Cop_backoff_rpcPD_p90_time: 0.2\n```\n\n#### Subqueries are executed in advance\n\nFor statements with non-correlated subqueries, the subquery part might be executed in advance. For example, in `select * from t1 where a = (select max(a) from t2)`, the `select max(a) from t2` part might be executed in advance in the optimization stage. 
The result of `EXPLAIN ANALYZE` does not show the duration of this type of subqueries.\n\n```sql\nmysql> explain analyze select count(*) from t where a=(select max(t1.a) from t t1, t t2 where t1.a=t2.a);\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n| id                           | estRows  | actRows | task      | access object | execution info           | operator info                    | memory    | disk |\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n| StreamAgg_59                 | 1.00     | 1       | root      |               | time:4.69267ms, loops:2  | funcs:count(Column#10)->Column#8 | 372 Bytes | N/A  |\n| └─TableReader_60             | 1.00     | 1       | root      |               | time:4.690428ms, loops:2 | data:StreamAgg_48                | 141 Bytes | N/A  |\n|   └─StreamAgg_48             | 1.00     |         | cop[tikv] |               | time:0ns, loops:0        | funcs:count(1)->Column#10        | N/A       | N/A  |\n|     └─Selection_58           | 16384.00 |         | cop[tikv] |               | time:0ns, loops:0        | eq(test.t.a, 1)                  | N/A       | N/A  |\n|       └─TableFullScan_57     | 16384.00 | -1      | cop[tikv] | table:t       | time:0s, loops:0         | keep order:false                 | N/A       | N/A  |\n+------------------------------+----------+---------+-----------+---------------+--------------------------+----------------------------------+-----------+------+\n5 rows in set (7.77 sec)\n```\n\nBut you can identify this type of subquery execution in the slow log:\n\n```\n# Query_time: 7.770634843\n...\n# Rewrite_time: 7.765673663 Preproc_subqueries: 1 Preproc_subqueries_time: 7.765231874\n```\n\nFrom log record above, you can see that a subquery is executed in advance and 
takes `7.76s`.\n\n### TiDB is slow in execution\n\nAssume that the execution plan in TiDB is correct but the execution is slow. To solve this type of issue, you can adjust parameters or use the hint according to the result of `EXPLAIN ANALYZE` for the SQL statement.\n\nIf the execution plan is incorrect, see the [Analyze optimizer issues](#analyze-optimizer-issues) section.\n\n#### Low concurrency\n\nIf the bottleneck is in the operator with concurrency, speed up the execution by adjusting the concurrency. For example:\n\n```sql\nmysql> explain analyze select sum(t1.a) from t t1, t t2 where t1.a=t2.a;\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n| id                               | estRows      | actRows   | task      | access object | execution info                                                                      | operator info                                  | memory           | disk    |\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n| HashAgg_11                       | 1.00         | 1         | root      |               | time:9.666832189s, loops:2, PartialConcurrency:4, FinalConcurrency:4                | funcs:sum(Column#6)->Column#5                  | 322.125 KB       | N/A     |\n| └─Projection_24                  | 268435456.00 | 268435456 | root      |               | time:9.098644711s, loops:262145, Concurrency:4                                      | cast(test.t.a, decimal(65,0) BINARY)->Column#6 | 199 KB           | N/A     |\n|   └─HashJoin_14                  | 268435456.00 | 268435456 | root      |               | 
time:6.616773501s, loops:262145, Concurrency:5, probe collision:0, build:881.404µs  | inner join, equal:[eq(test.t.a, test.t.a)]     | 131.75 KB        | 0 Bytes |\n|     ├─TableReader_21(Build)      | 16384.00     | 16384     | root      |               | time:6.553717ms, loops:17                                                           | data:Selection_20                              | 33.6318359375 KB | N/A     |\n|     │ └─Selection_20             | 16384.00     |           | cop[tikv] |               | time:0ns, loops:0                                                                   | not(isnull(test.t.a))                          | N/A              | N/A     |\n|     │   └─TableFullScan_19       | 16384.00     | -1        | cop[tikv] | table:t2      | time:0s, loops:0                                                                    | keep order:false                               | N/A              | N/A     |\n|     └─TableReader_18(Probe)      | 16384.00     | 16384     | root      |               | time:6.880923ms, loops:17                                                           | data:Selection_17                              | 33.6318359375 KB | N/A     |\n|       └─Selection_17             | 16384.00     |           | cop[tikv] |               | time:0ns, loops:0                                                                   | not(isnull(test.t.a))                          | N/A              | N/A     |\n|         └─TableFullScan_16       | 16384.00     | -1        | cop[tikv] | table:t1      | time:0s, loops:0                                                                    | keep order:false                               | N/A              | N/A     |\n+----------------------------------+--------------+-----------+-----------+---------------+-------------------------------------------------------------------------------------+------------------------------------------------+------------------+---------+\n9 rows in set (9.67 sec)\n```\n\nAs 
shown above, `HashJoin_14` and `Projection_24` consume much of the execution time. Consider increasing their concurrency using SQL variables to speed up execution.\n\nAll system variables are documented in [system-variables](/system-variables.md). To increase the concurrency of `HashJoin_14`, you can modify the `tidb_hash_join_concurrency` system variable.\n\n#### Data is spilled to disk\n\nAnother cause of slow execution is disk spill that occurs during execution if the memory limit is reached. You can find out this cause in the execution plan and the slow log:\n\n```sql\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n| id                      | estRows   | actRows | task      | access object | execution info               | operator info        | memory                | disk           |\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n| Sort_4                  | 462144.00 | 462144  | root      |               | time:2.02848898s, loops:453  | test.t.a             | 149.68795776367188 MB | 219.3203125 MB |\n| └─TableReader_8         | 462144.00 | 462144  | root      |               | time:616.211272ms, loops:453 | data:TableFullScan_7 | 197.49601364135742 MB | N/A            |\n|   └─TableFullScan_7     | 462144.00 | -1      | cop[tikv] | table:t       | time:0s, loops:0             | keep order:false     | N/A                   | N/A            |\n+-------------------------+-----------+---------+-----------+---------------+------------------------------+----------------------+-----------------------+----------------+\n```\n\n```\n...\n# Disk_max: 229974016\n...\n```\n\n#### Join operations with Cartesian product\n\nJoin operations with Cartesian product generate data volume as large as `left child row count * right 
child row count`. This is inefficient and should be avoided.\n\nThis type of join operations is marked `CARTESIAN` in the execution plan. For example:\n\n```sql\nmysql> explain select * from t t1, t t2 where t1.a>t2.a;\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n| id                           | estRows     | task      | access object | operator info                                           |\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n| HashJoin_8                   | 99800100.00 | root      |               | CARTESIAN inner join, other cond:gt(test.t.a, test.t.a) |\n| ├─TableReader_15(Build)      | 9990.00     | root      |               | data:Selection_14                                       |\n| │ └─Selection_14             | 9990.00     | cop[tikv] |               | not(isnull(test.t.a))                                   |\n| │   └─TableFullScan_13       | 10000.00    | cop[tikv] | table:t2      | keep order:false, stats:pseudo                          |\n| └─TableReader_12(Probe)      | 9990.00     | root      |               | data:Selection_11                                       |\n|   └─Selection_11             | 9990.00     | cop[tikv] |               | not(isnull(test.t.a))                                   |\n|     └─TableFullScan_10       | 10000.00    | cop[tikv] | table:t1      | keep order:false, stats:pseudo                          |\n+------------------------------+-------------+-----------+---------------+---------------------------------------------------------+\n```\n\n## Analyze optimizer issues\n\nTo analyze optimizer issues, you need to determine whether the execution plan is reasonable or not. 
You need to have some understanding of the optimization process and each operator.\n\nFor the following examples, assume that the table schema is `create table t (id int, a int, b int, c int, primary key(id), key(a), key(b, c))`.\n\n1. `select * from t`: There is no filter condition and a full table scan is performed. So the `TableFullScan` operator is used to read data.\n2. `select a from t where a=2`: There is a filter condition and only the index columns are read, so the `IndexReader` operator is used to read data.\n3. `select * from t where a=2`: There is a filter condition for `a` but the `a` index cannot fully cover the data to be read, so the `IndexLookup` operator is used.\n4. `select b from t where c=3`: Without the prefix condition, the multi-column index cannot be used. So the `IndexFullScan` is used.\n5. ...\n\nThe examples above are operators used for data reads. For more operators, see [Understand TiDB Execution Plan](/explain-overview.md).\n\nIn addition, reading [SQL Tuning Overview](/sql-tuning-overview.md) helps you better understand the TiDB optimizer and determine whether the execution plan is reasonable or not.\n\nMost optimizer issues are explained in [SQL Tuning Overview](/sql-tuning-overview.md). For the solutions, see the following documents:\n\n1. [Wrong Index Solution](/wrong-index-solution.md)\n2. [Wrong join order](/join-reorder.md)\n3. [Expressions are not pushed down](/blocklist-control-plan.md)\n"
  },
  {
    "path": "core/tests/fixtures/tidb-overview.md",
    "content": "---\ntitle: What is TiDB Self-Managed\nsummary: Learn about the key features and usage scenarios of TiDB.\naliases: ['/docs/dev/key-features/','/tidb/dev/key-features','/docs/dev/overview/']\n---\n\n# What is TiDB Self-Managed\n\n<!-- Localization note for TiDB:\n\n- English: use distributed SQL, and start to emphasize HTAP\n- Chinese: can keep \"NewSQL\" and emphasize one-stop real-time HTAP (\"一栈式实时 HTAP\")\n- Japanese: use NewSQL because it is well-recognized\n\n-->\n\n[TiDB](https://github.com/pingcap/tidb) (/'taɪdiːbi:/, \"Ti\" stands for Titanium) is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL compatible and features horizontal scalability, strong consistency, and high availability. The goal of TiDB is to provide users with a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services. TiDB is suitable for various use cases that require high availability and strong consistency with large-scale data.\n\nTiDB Self-Managed is a product option of TiDB, where users or organizations can deploy and manage TiDB on their own infrastructure with complete flexibility. With TiDB Self-Managed, you can enjoy the power of open source, distributed SQL while retaining full control over your environment.\n\nThe following video introduces key features of TiDB.\n\n<iframe width=\"600\" height=\"450\" src=\"https://www.youtube.com/embed/aWBNNPm21zg?enablejsapi=1\" title=\"Why TiDB?\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture\" allowfullscreen></iframe>\n\n## Key features\n\n- **Easy horizontal scaling**\n\n  The TiDB architecture design separates computing from storage, letting you scale out or scale in the computing or storage capacity online as needed. 
The scaling process is transparent to application operations and maintenance staff.\n\n- **Financial-grade high availability**\n\n  Data is stored in multiple replicas, and the Multi-Raft protocol is used to obtain the transaction log. A transaction can only be committed when data has been successfully written into the majority of replicas. This guarantees strong consistency and availability when a minority of replicas go down. You can configure the geographic location and number of replicas as needed to meet different disaster tolerance levels.\n\n- **Real-time HTAP**\n\n  TiDB provides two storage engines: [TiKV](/tikv-overview.md), a row-based storage engine, and [TiFlash](/tiflash/tiflash-overview.md), a columnar storage engine. TiFlash uses the Multi-Raft Learner protocol to replicate data from TiKV in real time, ensuring consistent data between the TiKV row-based storage engine and the TiFlash columnar storage engine. TiKV and TiFlash can be deployed on different machines as needed to solve the problem of HTAP resource isolation.\n\n- **Cloud-native distributed database**\n\n  TiDB is a distributed database designed for the cloud, providing flexible scalability, reliability, and security on the cloud platform. Users can elastically scale TiDB to meet the requirements of their changing workloads. In TiDB, each piece of data has at least 3 replicas, which can be scheduled in different cloud availability zones to tolerate the outage of a whole data center. [TiDB Operator](https://docs.pingcap.com/tidb-in-kubernetes/stable/tidb-operator-overview) helps manage TiDB on Kubernetes and automates tasks related to operating the TiDB cluster, making TiDB easier to deploy on any cloud that provides managed Kubernetes. 
[TiDB Cloud](https://pingcap.com/tidb-cloud/), the fully-managed TiDB service, is the easiest, most economical, and most resilient way to unlock the full power of [TiDB in the cloud](https://docs.pingcap.com/tidbcloud/), allowing you to deploy and run TiDB clusters with just a few clicks.\n\n- **Compatible with the MySQL protocol and MySQL ecosystem**\n\n  TiDB is compatible with the MySQL protocol, common features of MySQL, and the MySQL ecosystem. To migrate applications to TiDB, you do not need to change a single line of code in many cases, or only need to modify a small amount of code. In addition, TiDB provides a series of [data migration tools](/ecosystem-tool-user-guide.md) to help easily migrate application data into TiDB.\n\n## See also\n\n- [TiDB Architecture](/tidb-architecture.md)\n- [TiDB Storage](/tidb-storage.md)\n- [TiDB Computing](/tidb-computing.md)\n- [TiDB Scheduling](/tidb-scheduling.md)\n"
  },
  {
    "path": "core/tests/knowledge_base/__init__.py",
    "content": ""
  },
  {
    "path": "core/tests/knowledge_base/test_kb_with_namespace.py",
    "content": "import logging\n\nimport pytest\n\nfrom autoflow.configs.knowledge_base import IndexMethod\nfrom autoflow.knowledge_base import KnowledgeBase\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"module\")\ndef kb(db_engine, llm, embedding_model):\n    kb = KnowledgeBase(\n        namespace=\"test\",\n        name=\"Test\",\n        description=\"Here is a knowledge base with namespace\",\n        index_methods=[IndexMethod.VECTOR_SEARCH, IndexMethod.KNOWLEDGE_GRAPH],\n        llm=llm,\n        embedding_model=embedding_model,\n        db_engine=db_engine,\n    )\n    logger.info(\n        \"Created a knowledge base with namespace <%s> successfully.\", kb.namespace\n    )\n    return kb\n\n\ndef test_add_documents_via_filepath(kb: KnowledgeBase):\n    docs = kb.add(\"./tests/fixtures/analyze-slow-queries.md\")\n    assert len(docs) == 1\n\n\ndef test_add_documents_via_url(kb: KnowledgeBase):\n    docs = kb.add(\"https://docs.pingcap.com/tidbcloud/tidb-cloud-intro\")\n    assert len(docs) == 1\n\n\ndef test_search_documents(kb: KnowledgeBase):\n    result = kb.search_documents(\n        query=\"What is TiDB?\",\n        top_k=2,\n    )\n    assert len(result.chunks) > 0\n\n\ndef test_search_knowledge_graph(kb: KnowledgeBase):\n    knowledge_graph = kb.search_knowledge_graph(\n        query=\"What is TiDB?\",\n    )\n    assert len(knowledge_graph.entities) > 0\n    assert len(knowledge_graph.relationships) > 0\n"
  },
  {
    "path": "core/tests/knowledge_base/test_kb_without_namespace.py",
    "content": "import logging\n\nimport pytest\n\nfrom autoflow.configs.knowledge_base import IndexMethod\nfrom autoflow.knowledge_base import KnowledgeBase\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"module\")\ndef kb(db_engine, llm, embedding_model):\n    kb = KnowledgeBase(\n        db_engine=db_engine,\n        name=\"Test\",\n        description=\"Here is a knowledge base without namespace\",\n        index_methods=[IndexMethod.VECTOR_SEARCH, IndexMethod.KNOWLEDGE_GRAPH],\n        llm=llm,\n        embedding_model=embedding_model,\n    )\n    logger.info(\"Created a knowledge base successfully.\")\n    return kb\n\n\ndef test_add_documents_via_filepath(kb: KnowledgeBase):\n    docs = kb.add(\"./tests/fixtures/analyze-slow-queries.md\")\n    assert len(docs) == 1\n\n\ndef test_add_documents_via_url(kb):\n    docs = kb.add(\"https://docs.pingcap.com/tidbcloud/tidb-cloud-intro\")\n    assert len(docs) == 1\n\n\ndef test_search_documents(kb):\n    result = kb.search_documents(\n        query=\"What is TiDB?\",\n        similarity_top_k=2,\n    )\n    assert len(result.chunks) > 0\n\n\ndef test_search_knowledge_graph(kb):\n    knowledge_graph = kb.search_knowledge_graph(\n        query=\"What is TiDB?\",\n    )\n    assert len(knowledge_graph.entities) > 0\n    assert len(knowledge_graph.relationships) > 0\n"
  },
  {
    "path": "core/tests/knowledge_graph/programs/test_extract_graph.py",
    "content": "import logging\nfrom pathlib import Path\nimport pytest\nfrom autoflow.knowledge_graph.programs.eval_graph import KnowledgeGraphEvaluator\nfrom autoflow.knowledge_graph.programs.extract_graph import KnowledgeGraphExtractor\nfrom autoflow.knowledge_graph.types import GeneratedKnowledgeGraph\n\nfrom autoflow.models.llms.dspy import get_dspy_lm_by_llm\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"module\")\ndef extractor(llm):\n    dspy_lm = get_dspy_lm_by_llm(llm)\n    extractor = KnowledgeGraphExtractor(dspy_lm=dspy_lm)\n    return extractor\n\n\n@pytest.fixture(scope=\"module\")\ndef evaluator(llm):\n    dspy_lm = get_dspy_lm_by_llm(llm)\n    evaluator = KnowledgeGraphEvaluator(dspy_lm=dspy_lm)\n    return evaluator\n\n\ndef test_extract_graph(extractor, evaluator):\n    text = Path(\"tests/fixtures/tidb-overview.md\").read_text()\n\n    # Expected knowledge graph output\n    expected_knowledge = GeneratedKnowledgeGraph.model_validate(\n        {\n            \"entities\": [\n                {\n                    \"name\": \"TiDB\",\n                    \"description\": \"An open-source distributed SQL database that supports HTAP workloads.\",\n                },\n                {\n                    \"name\": \"TiDB Self-Managed\",\n                    \"description\": \"A product option of TiDB where users deploy and manage TiDB on their own infrastructure.\",\n                },\n                {\n                    \"name\": \"TiDB Cloud\",\n                    \"description\": \"The fully-managed TiDB service for deploying and running TiDB clusters in the cloud.\",\n                },\n                {\n                    \"name\": \"TiDB Operator\",\n                    \"description\": \"A tool that helps manage TiDB on Kubernetes and automates tasks related to operating TiDB clusters\",\n                },\n                {\n                    \"name\": \"TiKV\",\n                    \"description\": \"A 
row-based storage engine used by TiDB.\",\n                },\n                {\n                    \"name\": \"TiFlash\",\n                    \"description\": \"A columnar storage engine used by TiDB.\",\n                },\n                {\n                    \"name\": \"Multi-Raft Learner protocol\",\n                    \"description\": \"A protocol used by TiDB to replicate data from TiKV to TiFlash.\",\n                },\n            ],\n            \"relationships\": [\n                {\n                    \"source_entity_name\": \"TiDB\",\n                    \"target_entity_name\": \"TiKV\",\n                    \"description\": \"TiDB uses TiKV as its storage engine\",\n                },\n                {\n                    \"source_entity_name\": \"TiDB\",\n                    \"target_entity_name\": \"TiFlash\",\n                    \"description\": \"TiDB uses TiFlash as its analytics engine\",\n                },\n                {\n                    \"source_entity_name\": \"TiDB\",\n                    \"target_entity_name\": \"Multi-Raft Learner protocol\",\n                    \"description\": \"TiDB uses the Multi-Raft Learner protocol to replicate data from TiKV to TiFlash.\",\n                },\n                {\n                    \"source_entity_name\": \"TiDB\",\n                    \"target_entity_name\": \"HTAP\",\n                    \"description\": \"TiDB supports HTAP workloads\",\n                },\n                {\n                    \"source_entity_name\": \"TiDB Self-Managed\",\n                    \"target_entity_name\": \"TiDB\",\n                    \"description\": \"TiDB Self-Managed is a product option of TiDB\",\n                },\n                {\n                    \"source_entity_name\": \"TiDB Cloud\",\n                    \"target_entity_name\": \"TiDB\",\n                    \"description\": \"TiDB Cloud is a fully-managed TiDB service\",\n                },\n                {\n                
    \"source_entity_name\": \"TiDB Operator\",\n                    \"target_entity_name\": \"TiDB Cloud\",\n                    \"description\": \"TiDB Operator is a tool that helps manage TiDB on Kubernetes and automates tasks related to operating TiDB clusters\",\n                },\n            ],\n        }\n    )\n\n    # Generate knowledge graph\n    actual_knowledge = extractor.forward(text)\n\n    # Use LLM to evaluate the completeness\n    evaluation_result = evaluator.forward(expected_knowledge, actual_knowledge)\n    final_score = evaluation_result.score\n\n    logger.info(f\"Final score: {final_score}\")\n    assert final_score > 0.4, \"The completeness score should be greater than 0.4.\"\n"
  },
  {
    "path": "core/tests/knowledge_graph/test_kg_extractor.py",
    "content": "from pathlib import Path\nfrom autoflow.knowledge_graph.extractors.simple import SimpleKGExtractor\nfrom autoflow.models.llms.dspy import get_dspy_lm_by_llm\n\n\ndef test_kg_extractor(llm):\n    text = Path(\"./tests/fixtures/tidb-overview.md\").read_text()\n    dspy_lm = get_dspy_lm_by_llm(llm)\n    extractor = SimpleKGExtractor(dspy_lm)\n    knowledge_graph = extractor.extract(text)\n    assert knowledge_graph is not None\n    assert len(knowledge_graph.entities) >= 2\n    assert len(knowledge_graph.relationships) >= 1\n\n    for entity in knowledge_graph.entities:\n        assert entity.name is not None\n        assert entity.description is not None\n        assert len(entity.meta) > 0\n\n    for relationship in knowledge_graph.relationships:\n        assert relationship.source_entity_name is not None\n        assert relationship.target_entity_name is not None\n        assert relationship.description is not None\n"
  },
  {
    "path": "core/tests/models/test_model_manager.py",
    "content": "import logging\nimport os\n\nfrom llama_index.core.base.llms.types import ChatMessage\nfrom llama_index.core.schema import NodeWithScore, TextNode\nimport pytest\n\nfrom autoflow.configs.models.providers import ModelProviders\nfrom autoflow.configs.models.providers.openai import OpenAIConfig\nfrom autoflow.models.manager import (\n    model_manager,\n    ProviderConfig,\n)\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"module\", autouse=True)\ndef setup_model_manager():\n    model_manager.registry_provider(\n        name=ModelProviders.OPENAI,\n        config=OpenAIConfig(\n            api_key=os.getenv(\"OPENAI_API_KEY\"),\n        ),\n    )\n\n    model_manager.registry_provider(\n        name=ModelProviders.JINA_AI,\n        config=ProviderConfig(\n            api_key=os.getenv(\"JINAAI_API_KEY\"),\n        ),\n    )\n\n\ndef test_llm():\n    llm = model_manager.resolve_llm(\n        provider=ModelProviders.OPENAI,\n        config={\n            \"model\": \"gpt-4o\",\n        },\n    )\n\n    res = llm.chat(\n        messages=[\n            ChatMessage(\n                role=\"user\",\n                content=\"Does TiDB Support Vector Search (Y/N)?\",\n            )\n        ],\n        max_tokens=1,\n    )\n    assert res.message.content is not None\n    logger.info(\n        f\"LLM Answer: {res.message.content}\",\n    )\n\n\ndef test_embedding_model():\n    embed_model = model_manager.resolve_embedding_model(\n        provider=ModelProviders.OPENAI,\n        config={\n            \"model\": \"text-embedding-3-small\",\n            \"dimensions\": 1536,\n        },\n    )\n    vector = embed_model.get_query_embedding(\"What is TiDB?\")\n    assert len(vector) == 1536\n\n\ndef test_reranker_model():\n    reranker_model = model_manager.resolve_rerank_model(\n        provider=ModelProviders.JINA_AI,\n        config={\"model\": \"jina-reranker-v2-base-multilingual\"},\n    )\n    nodes = reranker_model.postprocess_nodes(\n   
     query_str=\"Database\",\n        nodes=[\n            NodeWithScore(node=TextNode(text=\"Redis\")),\n            NodeWithScore(node=TextNode(text=\"OpenAI\")),\n            NodeWithScore(node=TextNode(text=\"TiDB\")),\n        ],\n    )\n    assert len(nodes) == 3\n"
  },
  {
    "path": "core/tests/storage/__init__.py",
    "content": ""
  },
  {
    "path": "core/tests/storage/doc_store/test_tidb_doc_store.py",
    "content": "import pytest\n\nfrom pytidb import TiDBClient\nfrom autoflow.models.embedding_models import EmbeddingModel\nfrom autoflow.storage.doc_store.tidb_doc_store import TiDBDocumentStore\nfrom autoflow.storage.doc_store.types import Document, Chunk\nfrom autoflow.utils.hash import sha256\n\n\n@pytest.fixture(scope=\"session\")\ndef doc_store():\n    tidb_client = TiDBClient.connect()\n    return TiDBDocumentStore(namespace=\"doc_store\", client=tidb_client, vector_dims=3)\n\n\n@pytest.fixture(scope=\"session\")\ndef doc_store_with_auto_embed():\n    tidb_client = TiDBClient.connect()\n    embedding_model = EmbeddingModel(model_name=\"text-embedding-3-small\")\n    return TiDBDocumentStore(\n        namespace=\"doc_store_with_auto_embed\",\n        client=tidb_client,\n        embedding_model=embedding_model,\n    )\n\n\ndef test_crud(doc_store):\n    doc_store.reset()\n\n    # Create\n    documents = doc_store.add(\n        [\n            Document(\n                name=\"TiDB\",\n                content=\"TiDB is a distributed SQL database.\",\n                chunks=[\n                    Chunk(\n                        text=\"TiDB is a distributed SQL database.\", text_vec=[1, 2, 3]\n                    ),\n                ],\n            ),\n            Document(\n                name=\"TiKV\",\n                content=\"TiKV is a distributed key-value storage engine.\",\n                chunks=[\n                    Chunk(\n                        text=\"TiKV is a distributed key-value storage engine.\",\n                        text_vec=[4, 5, 6],\n                    ),\n                ],\n            ),\n            Document(\n                name=\"TiFlash\",\n                content=\"TiFlash is a column-oriented storage engine.\",\n                chunks=[\n                    Chunk(\n                        text=\"TiFlash is a column-oriented storage engine.\",\n                        text_vec=[7, 8, 9],\n                    ),\n             
   ],\n            ),\n        ]\n    )\n    assert len(documents) == 3\n    for doc in documents:\n        assert doc.id is not None\n        assert doc.created_at is not None\n        assert len(doc.chunks) == 1\n\n        chunk = doc.chunks[0]\n        assert chunk.id is not None\n        assert chunk.document_id == doc.id\n        assert chunk.text == doc.content\n        assert chunk.text_vec is not None\n        assert len(chunk.text_vec) == 3\n\n    # Retrieve - Vector Search\n    results = doc_store.search([4, 5, 6], top_k=2)\n    assert len(results.documents) == 2\n    assert results.documents[0].name == \"TiKV\"\n    assert results.chunks[0].score > 0\n\n    # Update\n    document_id = results.chunks[0].document_id\n    old_chunk = results.chunks[0]\n    old_vector_sha = sha256(str(old_chunk.text_vec))\n    new_chunk = doc_store.update_chunk(\n        old_chunk.id,\n        {\n            \"text\": \"TiKV is a distributed key-value storage engine for TiDB.\",\n            \"text_vec\": [3, 6, 9],\n        },\n    )\n    new_vector_sha = sha256(str(new_chunk.text_vec))\n    assert new_vector_sha != old_vector_sha\n\n    # Delete\n    doc_store.delete_chunk(new_chunk.id)\n    chunks = doc_store.list_doc_chunks(document_id)\n    assert len(chunks) == 0\n\n\ndef test_crud_with_auto_embed(doc_store_with_auto_embed):\n    doc_store_with_auto_embed.reset()\n\n    # Create\n    documents = doc_store_with_auto_embed.add(\n        [\n            Document(\n                name=\"TiDB\",\n                content=\"TiDB is a distributed SQL database.\",\n                chunks=[\n                    Chunk(text=\"TiDB is a distributed SQL database.\"),\n                ],\n            ),\n            Document(\n                name=\"TiKV\",\n                content=\"TiKV is a distributed key-value storage engine.\",\n                chunks=[\n                    Chunk(text=\"TiKV is a distributed key-value storage engine.\"),\n                ],\n            ),\n    
        Document(\n                name=\"TiFlash\",\n                content=\"TiFlash is a column-oriented storage engine.\",\n                chunks=[\n                    Chunk(text=\"TiFlash is a column-oriented storage engine.\"),\n                ],\n            ),\n        ]\n    )\n    assert len(documents) == 3\n    for doc in documents:\n        assert doc.id is not None\n        assert doc.created_at is not None\n        assert len(doc.chunks) == 1\n\n        chunk = doc.chunks[0]\n        assert chunk.id is not None\n        assert chunk.document_id == doc.id\n        assert chunk.text == doc.content\n        assert chunk.text_vec is not None\n        assert len(chunk.text_vec) == 1536\n\n    # Retrieve - Vector Search\n    results = doc_store_with_auto_embed.search(\"tikv\", top_k=2)\n    assert len(results.documents) == 2\n    assert results.documents[0].name == \"TiKV\"\n    assert results.chunks[0].score > 0\n\n    # Update\n    document_id = results.chunks[0].document_id\n    old_chunk = results.chunks[0]\n    old_vector_sha = sha256(str(old_chunk.text_vec))\n    new_chunk = doc_store_with_auto_embed.update_chunk(\n        old_chunk.id,\n        {\"text\": \"TiKV is a distributed key-value storage engine for TiDB.\"},\n    )\n    new_vector_sha = sha256(str(new_chunk.text_vec))\n    # To check the auto embedding_models is work on updating.\n    assert new_vector_sha != old_vector_sha\n\n    # Delete\n    doc_store_with_auto_embed.delete_chunk(new_chunk.id)\n    chunks = doc_store_with_auto_embed.list_doc_chunks(document_id)\n    assert len(chunks) == 0\n"
  },
  {
    "path": "core/tests/storage/graph_store/__init__.py",
    "content": ""
  },
  {
    "path": "core/tests/storage/graph_store/test_tidb_graph_store.py",
    "content": "from hashlib import sha256\nimport logging\nimport pytest\n\nfrom autoflow.storage.graph_store import TiDBGraphStore\nfrom autoflow.storage.graph_store.types import (\n    EntityType,\n    EntityUpdate,\n    RelationshipUpdate,\n)\n\n\nlogger = logging.getLogger(__name__)\n\n\n@pytest.fixture(scope=\"session\")\ndef graph_store(tidb_client, embedding_model):\n    return TiDBGraphStore(\n        client=tidb_client,\n        embedding_model=embedding_model,\n        namespace=\"tidb_graph_store_test\",\n    )\n\n\ndef test_entity_crud(graph_store: TiDBGraphStore):\n    graph_store.reset()\n\n    # Create entities\n    tidb_entity = graph_store.create_entity(\n        name=\"TiDB\", description=\"TiDB is a relational database.\"\n    )\n    assert tidb_entity.id is not None\n    logger.info(\n        \"tidb_graph_store: add new entity (name: %s, id: %s)\",\n        tidb_entity.name,\n        tidb_entity.id,\n    )\n\n    tikv_entity = graph_store.create_entity(\n        name=\"TiKV\", description=\"TiKV is a distributed key-value storage engine.\"\n    )\n    assert tikv_entity.id is not None\n    logger.info(\n        \"tidb_graph_store: add new entity (name: %s, id: %s)\",\n        tikv_entity.name,\n        tikv_entity.id,\n    )\n\n    # Get entity\n    entity = graph_store.get_entity(tidb_entity.id)\n    assert entity.id is not None\n    assert entity.entity_type == EntityType.original\n    assert entity.name == \"TiDB\"\n    assert entity.embedding is not None\n    assert entity.created_at is not None\n    assert entity.updated_at is not None\n\n    # List entities\n    entities = graph_store.list_entities(entity_id=tidb_entity.id)\n    assert len(entities) == 1\n    assert entities[0].id == tidb_entity.id\n\n    entities = graph_store.list_entities(entity_id=[tidb_entity.id])\n    assert len(entities) == 1\n    assert entities[0].id == tidb_entity.id\n\n    entities = graph_store.list_entities(entity_type=EntityType.original)\n    assert 
len(entities) == 2\n\n    # Update entity\n    old_embedding = tidb_entity.embedding\n    updated_tidb_entity = graph_store.update_entity(\n        entity=tidb_entity,\n        update=EntityUpdate(\n            name=\"TiDB\", description=\"TiDB is a MySQL-compatible database.\"\n        ),\n    )\n    new_embedding = updated_tidb_entity.embedding\n    assert updated_tidb_entity.id == tidb_entity.id\n    assert updated_tidb_entity.name == \"TiDB\"\n    assert updated_tidb_entity.description == \"TiDB is a MySQL-compatible database.\"\n    assert sha256(new_embedding) != sha256(old_embedding)\n\n    # Delete entity\n    graph_store.delete_entity(tidb_entity.id)\n    try:\n        graph_store.get_entity(tidb_entity.id)\n        raise AssertionError(\"Entity should be deleted\")\n    except Exception as e:\n        logger.info(\n            \"tidb_graph_store: entity %s should be deleted: %s\", tidb_entity.id, e\n        )\n\n    graph_store.reset()\n\n\ndef test_relationship_crud(graph_store: TiDBGraphStore):\n    graph_store.reset()\n\n    # Create entities\n    tidb_entity = graph_store.create_entity(\n        name=\"TiDB\", description=\"TiDB is a relational database.\"\n    )\n    tikv_entity = graph_store.create_entity(\n        name=\"TiKV\", description=\"TiKV is a distributed key-value storage engine.\"\n    )\n\n    # Create relationships\n    relationship = graph_store.create_relationship(\n        source_entity=tidb_entity,\n        target_entity=tikv_entity,\n        description=\"TiDB uses TiKV as its storage engine.\",\n        meta={\n            \"source\": \"TiDB's Documentation\",\n        },\n    )\n    assert relationship.id is not None\n    assert relationship.source_entity_id == tidb_entity.id\n    assert relationship.target_entity_id == tikv_entity.id\n    assert relationship.description == \"TiDB uses TiKV as its storage engine.\"\n    assert relationship.embedding is not None\n    assert relationship.created_at is not None\n    assert 
relationship.updated_at is not None\n\n    # List relationships\n    relationships = graph_store.list_relationships(entity_id=tidb_entity.id)\n    assert len(relationships) == 1\n    assert relationships[0].id == relationship.id\n\n    # Update relationship\n    old_embedding = relationship.embedding\n    old_updated_at = relationship.updated_at\n    updated_relationship = graph_store.update_relationship(\n        relationship=relationship,\n        update=RelationshipUpdate(\n            description=\"TiDB uses TiKV as its storage engine for TP workloads.\"\n        ),\n    )\n    new_embedding = updated_relationship.embedding\n    new_updated_at = updated_relationship.updated_at\n    assert (\n        updated_relationship.description\n        == \"TiDB uses TiKV as its storage engine for TP workloads.\"\n    )\n    assert sha256(new_embedding) != sha256(old_embedding)\n    assert new_updated_at > old_updated_at\n\n    # Delete relationship\n    graph_store.delete_relationship(relationship.id)\n    try:\n        graph_store.get_relationship(relationship.id)\n        raise AssertionError(\"Relationship should be deleted\")\n    except Exception as e:\n        logger.info(\n            \"tidb_graph_store: relationship %s should be deleted: %s\",\n            relationship.id,\n            e,\n        )\n\n    graph_store.reset()\n\n\ndef test_entity_degree(graph_store: TiDBGraphStore):\n    graph_store.reset()\n\n    # Create entities\n    tidb_entity = graph_store.create_entity(\n        name=\"TiDB\", description=\"TiDB is a relational database.\"\n    )\n    tikv_entity = graph_store.create_entity(\n        name=\"TiKV\", description=\"TiKV is a distributed key-value storage engine.\"\n    )\n    tiflash_entity = graph_store.create_entity(\n        name=\"TiFlash\", description=\"TiFlash is a column-oriented database engine.\"\n    )\n\n    # Create relationships\n    graph_store.create_relationship(\n        source_entity=tidb_entity,\n        
target_entity=tikv_entity,\n        description=\"TiDB uses TiKV as its storage engine.\",\n    )\n    graph_store.create_relationship(\n        source_entity=tidb_entity,\n        target_entity=tiflash_entity,\n        description=\"TiDB uses TiFlash as its analytical engine.\",\n    )\n\n    # Calculate entity degree\n    out_degree = graph_store.calc_entity_out_degree(tidb_entity.id)\n    assert out_degree == 2\n\n    in_degree = graph_store.calc_entity_in_degree(tidb_entity.id)\n    assert in_degree == 0\n\n    degree = graph_store.calc_entity_degree(tidb_entity.id)\n    assert degree == 2\n\n    # Calculate entities degree\n    degrees = graph_store.calc_entities_degrees(\n        [tidb_entity.id, tikv_entity.id, tiflash_entity.id]\n    )\n    assert degrees[tidb_entity.id].out_degree == 2\n    assert degrees[tidb_entity.id].in_degree == 0\n    assert degrees[tidb_entity.id].degrees == 2\n\n    assert degrees[tikv_entity.id].out_degree == 0\n    assert degrees[tikv_entity.id].in_degree == 1\n    assert degrees[tikv_entity.id].degrees == 1\n\n    assert degrees[tiflash_entity.id].out_degree == 0\n    assert degrees[tiflash_entity.id].in_degree == 1\n    assert degrees[tiflash_entity.id].degrees == 1\n\n    graph_store.reset()\n"
  },
  {
    "path": "docker-compose-cn.yml",
    "content": "name: tidb-ai\n\nservices:\n  redis:\n    image: registry.cn-beijing.aliyuncs.com/pingcap-ee/redis:6.0.16\n    restart: always\n    volumes:\n      - ./redis-data:/data\n    command: [\"redis-server\", \"--loglevel\", \"warning\"]\n\n  backend:\n    image: registry.cn-beijing.aliyuncs.com/pingcap-ee/tidb.ai-backend:0.4.0\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"8000:80\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  frontend:\n    image: registry.cn-beijing.aliyuncs.com/pingcap-ee/tidb.ai-frontend:0.4.0\n    restart: always\n    depends_on:\n      - backend\n    ports:\n      - 3000:3000\n    environment:\n      BASE_URL: http://backend\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  background:\n    image: registry.cn-beijing.aliyuncs.com/pingcap-ee/tidb.ai-backend:0.4.0\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"5555:5555\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    command: /usr/bin/supervisord\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  local-embedding-reranker:\n    image: registry.cn-beijing.aliyuncs.com/pingcap-ee/tidb.ai-local-embedding-reranker:v4-with-cache\n    ports:\n      - 5001:5001\n    environment:\n      - HF_ENDPOINT=https://hf-mirror.com\n      - PRE_LOAD_DEFAULT_EMBEDDING_MODEL=true\n      # If you want to pre-load the default reranker model, change the following environment to true\n      - PRE_LOAD_DEFAULT_RERANKER_MODEL=false\n      - TRANSFORMERS_OFFLINE=1\n    # volumes:\n    #  - ./local-embedding-reranker:/root/.cache/huggingface\n    # If you are using NVIDIA GPU, you can uncomment the following lines to enable GPU support\n    # deploy:\n    #   
resources:\n    #     reservations:\n    #       devices:\n    #         - driver: nvidia\n    #           count: 1\n    #           capabilities: [gpu]\n    profiles:\n      - local-embedding-reranker\n"
  },
  {
    "path": "docker-compose.dev.yml",
    "content": "name: tidb-ai-build\n\nservices:\n  backend:\n    build:\n      context: backend\n      dockerfile: Dockerfile\n      args:\n        BUILDKIT_INLINE_CACHE: 1\n    ports:\n      - \"8006:80\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    depends_on:\n      - redis\n\n  frontend:\n    build:\n      context: .\n      dockerfile: ./frontend/Dockerfile\n      args:\n        BUILDKIT_INLINE_CACHE: 1\n    ports:\n      - \"3001:3000\"\n    environment:\n      BASE_URL: http://backend\n    depends_on:\n      - backend\n\n  background:\n    build:\n      context: backend\n      dockerfile: Dockerfile\n      args:\n        BUILDKIT_INLINE_CACHE: 1\n    ports:\n      - \"5556:5555\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    command: /usr/bin/supervisord\n    depends_on:\n      - redis\n\n  local-embedding-reranker:\n    build:\n      context: backend/local_embedding_reranker\n      dockerfile: Dockerfile\n      args:\n        BUILDKIT_INLINE_CACHE: 1\n    ports:\n      - \"5002:5001\"\n    environment:\n      - PRE_LOAD_DEFAULT_EMBEDDING_MODEL=true\n      - PRE_LOAD_DEFAULT_RERANKER_MODEL=false\n      - TRANSFORMERS_OFFLINE=1\n    profiles:\n      - local-embedding-reranker\n\n  redis:\n    image: redis:6.0.16 \n    volumes:\n      - ./redis-data:/data\n    command: [\"redis-server\", \"--loglevel\", \"warning\"]"
  },
  {
    "path": "docker-compose.yml",
    "content": "name: tidb-ai\n\nservices:\n  redis:\n    image: redis:6.0.16\n    restart: always\n    volumes:\n      - ./redis-data:/data\n    command: [\"redis-server\", \"--loglevel\", \"warning\"]\n\n  backend:\n    image: tidbai/backend:0.4.0\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"8000:80\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  frontend:\n    image: tidbai/frontend:0.4.0\n    restart: always\n    depends_on:\n      - backend\n    ports:\n      - 3000:3000\n    environment:\n      BASE_URL: http://backend\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  background:\n    image: tidbai/backend:0.4.0\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"5555:5555\"\n    env_file:\n      - .env\n    volumes:\n      - ./data:/shared/data\n    command: /usr/bin/supervisord\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  local-embedding-reranker:\n    image: tidbai/local-embedding-reranker:v4-with-cache\n    ports:\n      - 5001:5001\n    environment:\n      - PRE_LOAD_DEFAULT_EMBEDDING_MODEL=true\n      # If you want to pre-load the default reranker model, change the following environment to true\n      - PRE_LOAD_DEFAULT_RERANKER_MODEL=false\n      - TRANSFORMERS_OFFLINE=1\n    # volumes:\n    #   - ./local-embedding-reranker:/root/.cache/huggingface\n    # If you are using NVIDIA GPU, you can uncomment the following lines to enable GPU support\n    # deploy:\n    #   resources:\n    #     reservations:\n    #       devices:\n    #         - driver: nvidia\n    #           count: 1\n    #           capabilities: [gpu]\n    profiles:\n      - local-embedding-reranker\n"
  },
  {
    "path": "docs/.gitignore",
    "content": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pnp\n.pnp.js\n.yarn/install-state.gz\n\n# testing\n/coverage\n\n# next.js\n/.next/\n/out/\n\n# production\n/build\n\n# misc\n.DS_Store\n*.pem\n\n# debug\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\n\n# local env files\n.env*.local\n\n# vercel\n.vercel\n\n# typescript\n*.tsbuildinfo\nnext-env.d.ts\n\ncertificates\n\npublic/widget.js\n\npublic/robots.txt\npublic/sitemap.xml\npublic/sitemap-*.xml\n\n*storybook.log\nstorybook-static\n\n\n_pagefind/"
  },
  {
    "path": "docs/mdx-components.ts",
    "content": "import { useMDXComponents as getDocsMDXComponents } from 'nextra-theme-docs'\n\nconst docsComponents = getDocsMDXComponents()\n\nexport const useMDXComponents = components => ({\n  ...docsComponents,\n  ...components\n})\n"
  },
  {
    "path": "docs/next-sitemap.config.js",
    "content": "module.exports = {\n  siteUrl: process.env.SITE_URL || 'https://tidb.ai',\n  generateRobotsTxt: true,// (optional)\n  // ...other options\n}\n"
  },
  {
    "path": "docs/next.config.mjs",
    "content": "import nextra from 'nextra'\n\nconst withNextra = nextra({\n  latex: true,\n  defaultShowCopyCode: true,\n  search: {\n    codeblocks: true\n  },\n  contentDirBasePath: '/',\n})\n\nexport default withNextra({\n  // reactStrictMode: true\n})\n"
  },
  {
    "path": "docs/package.json",
    "content": "{\n  \"name\": \"example-docs\",\n  \"license\": \"MIT\",\n  \"private\": true,\n  \"scripts\": {\n    \"build\": \"next build\",\n    \"dev\": \"next --turbopack\",\n    \"postbuild\": \"pagefind --site .next/server/app --output-path public/_pagefind\",\n    \"start\": \"next start\"\n  },\n  \"dependencies\": {\n    \"next\": \"15.0.7\",\n    \"nextra\": \"^4.2.17\",\n    \"nextra-theme-docs\": \"^4.2.17\",\n    \"react\": \"19.1.0\",\n    \"react-dom\": \"19.1.0\",\n    \"tailwindcss\": \"^4.1.4\"\n  },\n  \"devDependencies\": {\n    \"@types/node\": \"22.14.1\",\n    \"@types/react\": \"19.1.2\",\n    \"pagefind\": \"^1.3.0\"\n  },\n  \"packageManager\": \"pnpm@9.15.0+sha512.76e2379760a4328ec4415815bcd6628dee727af3779aaa4c914e3944156c4299921a89f976381ee107d41f12cfa4b66681ca9c718f0668fa0831ed4c6d8ba56c\"\n}\n"
  },
  {
    "path": "docs/src/app/[[...mdxPath]]/page.jsx",
    "content": "import { generateStaticParamsFor, importPage } from 'nextra/pages'\nimport { useMDXComponents as getMDXComponents } from '../../../mdx-components'\n\nexport const generateStaticParams = generateStaticParamsFor('mdxPath')\n\nexport async function generateMetadata(props) {\n  const params = await props.params\n  const { metadata } = await importPage(params.mdxPath)\n  return metadata\n}\n\nconst Wrapper = getMDXComponents().wrapper\n\nexport default async function Page(props) {\n  const params = await props.params\n  const result = await importPage(params.mdxPath)\n  const { default: MDXContent, toc, metadata } = result\n  return (\n    <Wrapper toc={toc} metadata={metadata}>\n      <MDXContent {...props} params={params} />\n    </Wrapper>\n  )\n}\n"
  },
  {
    "path": "docs/src/app/_app.tsx",
    "content": "import \"./globals.css\";\nimport type { AppProps } from \"next/app\";\n\nexport default function App({ Component, pageProps }: AppProps) {\n  return <Component {...pageProps} />;\n}\n"
  },
  {
    "path": "docs/src/app/_ignored/_meta.js",
    "content": "// This file will be NOT treated as `_meta` file, since directory starts with underscore\nexport default {}\n"
  },
  {
    "path": "docs/src/app/_ignored/page.mdx",
    "content": "This file will be NOT treated as page, since directory starts with underscore\n"
  },
  {
    "path": "docs/src/app/_meta.ts",
    "content": "import type { Meta } from 'nextra'\n\nexport default {\n  docs: {\n    type: 'page',\n    title: '📘 Documentation'\n  }\n} satisfies Meta\n"
  },
  {
    "path": "docs/src/app/globals.css",
    "content": "@tailwind base;\n@tailwind components;\n@tailwind utilities;\n\n:root {\n  --foreground-rgb: 0, 0, 0;\n  --background-start-rgb: 214, 219, 220;\n  --background-end-rgb: 255, 255, 255;\n  --logo-color: #212121;\n}\n\n@media (prefers-color-scheme: dark) {\n  :root {\n    --foreground-rgb: 255, 255, 255;\n    --background-start-rgb: 0, 0, 0;\n    --background-end-rgb: 0, 0, 0;\n    --logo-color: #f5f5f5;\n  }\n}\n\n/* body {\n  color: rgb(var(--foreground-rgb));\n  background: linear-gradient(\n      to bottom,\n      transparent,\n      rgb(var(--background-end-rgb))\n    )\n    rgb(var(--background-start-rgb));\n} */\n\n@layer utilities {\n  .text-balance {\n    text-wrap: balance;\n  }\n}\n\n.dark {\n  .logo {\n    .logo-bg {\n      fill: #f5f5f5;\n    }\n    .logo-circle {\n      fill: #212121;\n    }\n  }\n}\n\n.logo {\n  .logo-bg {\n    fill: var(--logo-color);\n  }\n  .logo-circle {\n    fill: #f5f5f5;\n  }\n}\n"
  },
  {
    "path": "docs/src/app/layout.jsx",
    "content": "import 'nextra-theme-docs/style.css'\nimport './globals.css';\n\nimport { Banner, Head } from 'nextra/components'\n/* eslint-env node */\nimport { Footer, Layout, Navbar } from 'nextra-theme-docs'\n\nimport { getPageMap } from 'nextra/page-map'\n\nexport const metadata = {\n  metadataBase: new URL('https://autoflow.ai'),\n  title: {\n    template: 'AutoFlow'\n  },\n  description: 'Docs & Blogs of AutoFlow',\n  applicationName: 'AutoFlow',\n  generator: 'Next.js',\n  twitter: {\n    site: 'https://twitter.com/tidb_developer'\n  }\n}\n\nexport default async function RootLayout({ children }) {\n  const navbar = (\n    <Navbar\n      logo={\n        <div style={{ display: 'flex', alignItems: 'center' }}>\n          <svg width=\"24\" height=\"24\" viewBox=\"0 0 745 745\" fill=\"none\" className=\"logo\">\n            <rect width=\"745\" height=\"745\" rx=\"120\" className=\"logo-bg\" />\n            <rect x=\"298\" y=\"172\" width=\"150\" height=\"150\" rx=\"24\" className=\"logo-circle\" />\n            <rect x=\"298\" y=\"422\" width=\"150\" height=\"150\" rx=\"24\" className=\"logo-circle\" />\n          </svg>\n          <span style={{ marginLeft: '.5em', fontWeight: 300, fontSize: '20px' }}>\n            AutoFlow\n          </span>\n        </div>\n      }\n      logoLink=\"/\"\n      projectLink=\"https://github.com/pingcap/autoflow\"\n    >\n      <div style={{ display: 'flex', alignItems: 'center', gap: '1rem' }}>\n        <a\n          target=\"_blank\"\n          href=\"https://twitter.com/tidb_developer\"\n          aria-label=\"TiDB Developer Twitter\"\n          rel=\"nofollow noreferrer\"\n        >\n          <svg\n            xmlns=\"http://www.w3.org/2000/svg\"\n            width=\"24\"\n            height=\"24\"\n            viewBox=\"0 0 24 24\"\n            fill=\"currentColor\"\n            stroke=\"currentColor\"\n            strokeWidth=\"2\"\n            strokeLinecap=\"round\"\n            strokeLinejoin=\"round\"\n            
className=\"feather feather-twitter\"\n          >\n            <path d=\"M23 3a10.9 10.9 0 0 1-3.14 1.53 4.48 4.48 0 0 0-7.86 3v1A10.66 10.66 0 0 1 3 4s-4 9 5 13a11.64 11.64 0 0 1-7 2c9 5 20 0 20-11.5a4.5 4.5 0 0 0-.08-.83A7.72 7.72 0 0 0 23 3z\" />\n          </svg>\n        </a>\n        <a\n          target=\"_blank\"\n          href=\"https://pingcap.com/ai?utm_source=tidb.ai&utm_medium=community\"\n          aria-label=\"TiDB Vector\"\n          rel=\"nofollow noreferrer\"\n        >\n          <svg\n            xmlns=\"http://www.w3.org/2000/svg\"\n            width=\"24\"\n            height=\"24\"\n            viewBox=\"0 0 161.24 186.18\"\n            // viewBox='0 0 24 24'\n            fill=\"currentColor\"\n            stroke=\"currentColor\"\n            strokeWidth=\"2\"\n            strokeLinecap=\"round\"\n            strokeLinejoin=\"round\"\n            className=\"feather feather-tidb\"\n          >\n            <path fill=\"currentColor\" d=\"M80.62,0L0,46.54v93.09l80.62,46.54,80.62-46.54V46.54L80.62,0ZM80.57,61.98v93.12l-26.77-15.43v-62.24l-26.78,15.46v-30.91l53.54-30.91,26.77,15.45-26.76,15.45ZM134.36,124.12l-26.88,15.52v-62.04l26.88-15.53v62.06Z\" />\n          </svg>\n        </a>\n      </div>\n    </Navbar>\n  )\n  const pageMap = await getPageMap()\n  return (\n    <html lang=\"en\" dir=\"ltr\" suppressHydrationWarning>\n      <Head>\n        <link\n          rel=\"shortcut icon\"\n          href=\"/icon-light.svg\"\n          type=\"image/svg+xml\"\n          media=\"(prefers-color-scheme: dark)\"\n        />\n        <link\n          rel=\"shortcut icon\"\n          href=\"/icon-dark.svg\"\n          type=\"image/svg+xml\"\n          media=\"(prefers-color-scheme: light)\"\n        />\n      </Head>\n      <body>\n        <Layout\n          navbar={navbar}\n          footer={\n            <Footer>\n              <span>\n                <svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 161.24 186.18\" className=\"logo\" style={{ 
width: '24px', height: '24px' }}>\n                  <g>\n                    <path fill=\"currentColor\" d=\"M80.62,0L0,46.54v93.09l80.62,46.54,80.62-46.54V46.54L80.62,0ZM80.57,61.98v93.12l-26.77-15.43v-62.24l-26.78,15.46v-30.91l53.54-30.91,26.77,15.45-26.76,15.45ZM134.36,124.12l-26.88,15.52v-62.04l26.88-15.53v62.06Z\" />\n                  </g>\n                </svg>\n                <br />\n                {new Date().getFullYear()} © <a href=\"https://pingcap.com\" target=\"_blank\" rel=\"noopener noreferrer\">PingCAP</a>. All rights reserved.\n              </span>\n            </Footer>\n          }\n          editLink=\"Edit this page on GitHub\"\n          docsRepositoryBase=\"https://github.com/pingcap/autoflow\"\n          sidebar={{ toggleButton: true, defaultMenuCollapseLevel: 1 }}\n          pageMap={pageMap}\n        >\n          {children}\n        </Layout>\n      </body>\n    </html>\n  )\n}\n"
  },
  {
    "path": "docs/src/content/README.md",
    "content": "<!-- markdownlint-disable MD033 MD041 -->\n\n<div align=\"center\">\n  <h1>AutoFlow</h1>\n  \n  <a href='https://www.pingcap.com/tidb-cloud-serverless/?utm_source=tidb.ai&utm_medium=community'>\n    <img src=\"https://raw.githubusercontent.com/pingcap/tidb.ai/main/docs/public/icon-dark.svg\" alt=\"AutoFlow\" width=100 height=100 />\n  </a>\n\n  <a href=\"https://trendshift.io/repositories/12294\" target=\"_blank\"><img src=\"https://trendshift.io/api/badge/repositories/12294\" alt=\"pingcap%2Fautoflow | Trendshift\" style=\"width: 250px; height: 55px;\" width=\"250\" height=\"55\"/></a>\n\n  [![Backend Docker Image Version](https://img.shields.io/docker/v/tidbai/backend?sort=semver&arch=amd64&label=tidbai%2Fbackend&color=blue&logo=fastapi)](https://hub.docker.com/r/tidbai/backend)\n  [![Frontend Docker Image Version](https://img.shields.io/docker/v/tidbai/frontend?sort=semver&arch=amd64&label=tidbai%2Ffrontend&color=blue&logo=next.js)](https://hub.docker.com/r/tidbai/frontend)\n  [![E2E Status](https://img.shields.io/github/check-runs/pingcap/tidb.ai/main?nameFilter=E2E%20Test&label=e2e)](https://tidb-ai-playwright.vercel.app/)\n</div>\n\n> [!WARNING]\n> Autoflow is still in the early stages of development. And we are actively working on it, the next move is to make it to a python package and make it more user-friendly e.g. `pip install autoflow-ai`. 
If you have any questions or suggestions, please feel free to contact us on [Discussion](https://github.com/pingcap/autoflow/discussions).\n\n## Introduction\n\nAutoFlow is an open source graph rag (graphrag: knowledge graph rag) based knowledge base tool built on top of [TiDB Vector](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) and [LlamaIndex](https://github.com/run-llama/llama_index) and [DSPy](https://github.com/stanfordnlp/dspy).\n\n- **Live Demo**: [https://tidb.ai](https://tidb.ai?utm_source=tidb.ai&utm_medium=community)\n- **Deployment Docs**: [Deployment Docs](https://autoflow.tidb.ai/?utm_source=github&utm_medium=tidb.ai)\n\n## Features\n\n1. **Perplexity-style Conversational Search page**: Our platform features an advanced built-in website crawler, designed to elevate your browsing experience. This crawler effortlessly navigates official and documentation sites, ensuring comprehensive coverage and streamlined search processes through sitemap URL scraping.\n\n![Image](https://github.com/user-attachments/assets/50a4e5ce-8b93-446a-8ce7-11ed7844bd1e)\n\n2. **Embeddable JavaScript Snippet**: Integrate our conversational search window effortlessly into your website by copying and embedding a simple JavaScript code snippet. 
This widget, typically placed at the bottom right corner of your site, facilitates instant responses to product-related queries.\n\n![Image](https://github.com/user-attachments/assets/f0dc82db-c14d-4863-a242-c7da3a719568)\n\n## Deploy\n\n- [Deploy with Docker Compose](https://autoflow.tidb.ai/deploy-with-docker) (with: 4 CPU cores and 8GB RAM)\n\n## Tech Stack\n\n- [TiDB](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) – Database to store chat history, vector, json, and analytic\n- [LlamaIndex](https://www.llamaindex.ai/) - RAG framework\n- [DSPy](https://github.com/stanfordnlp/dspy) - The framework for programming—not prompting—foundation models\n- [Next.js](https://nextjs.org/) – Framework\n- [Tailwind CSS](https://tailwindcss.com/) – CSS framework\n- [shadcn/ui](https://ui.shadcn.com/) - Design\n\n## Contributing\n\nWe welcome contributions from the community. If you are interested in contributing to the project, please read the [Contributing Guidelines](/CONTRIBUTING.md).\n\n<a href=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats?repo_id=752946440\" target=\"_blank\" style=\"display: block\" align=\"center\">\n  <picture>\n    <source media=\"(prefers-color-scheme: dark)\" srcset=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=dark\" width=\"655\" height=\"auto\">\n    <img alt=\"Performance Stats of pingcap/autoflow - Last 28 days\" src=\"https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=light\" width=\"655\" height=\"auto\">\n  </picture>\n</a>\n<!-- Made with [OSS Insight](https://ossinsight.io/) -->\n\n## License\n\nAutoFlow is open-source under the Apache License, Version 2.0. You can [find it here](https://github.com/pingcap/autoflow/blob/main/LICENSE.txt).\n\n## Contact\n\nYou can reach out to us on [Discord](https://discord.gg/XzSW23Jg9p). \n"
  },
  {
    "path": "docs/src/content/_meta.ts",
    "content": "import type { Meta } from 'nextra';\n\nexport default {\n  \"-- Getting Started\": {\n    \"type\": \"separator\",\n    \"title\": \"Getting Started\"\n  },\n  \"index\": \"Introduction\",\n  \"quick-start\": \"Quick Start\",\n  \"resources\": \"Resources\",\n  \"faq\": \"FAQ\",\n  \"-- Deploy This RAG\": {\n    \"type\": \"separator\",\n    \"title\": \"Deployment\"\n  },\n  \"requirements\": \"Requirements\",\n  \"deploy-with-docker\": \"Deploy with Docker\",\n  \"-- Configuration\": {\n    \"type\": \"separator\",\n    \"title\": \"Configuration\"\n  },\n  \"llm\": {\n    \"title\": \"LLM - Large Language Model\"\n  },\n  \"embedding-model\": {\n    \"title\": \"Embedding Model\"\n  },\n  \"reranker-model\": {\n    \"title\": \"Reranker Model\"\n  },\n  \"knowledge-base\": {\n    \"title\": \"Knowledge Base\"\n  },\n  \"chat-engine\": {\n    \"title\": \"Chat Engine\"\n  },\n  \"evaluation\": {\n    \"title\": \"Evaluation\"\n  },\n  \"javascript\": {\n    \"title\": \"JS Widget\"\n  },\n  \"README\": {\n    \"display\": \"hidden\"\n  },\n  \"--\": {\n    \"type\": \"separator\"\n  },\n  \"releases\": \"Releases\"\n} satisfies Meta\n"
  },
  {
    "path": "docs/src/content/chat-engine.mdx",
    "content": "# Chat Engine\n\nChat Engine is a set of configurations that allow you to customize the chat experience for your users. You can configure the chat engine to use different models for generating responses, reranking results, and more.\n\n## Configure Chat Engine\n\nAfter logging in with an admin account, you can configure the Chat Engine in the admin panel.\n\n1. Click on the `Chat Engines` tab;\n2. Click on the `New Chat Engine` button to create a new chat engine;\n\n    ![\"Chat Engine Creation Page - Basic Information Section\"](https://github.com/user-attachments/assets/981a0adc-eac2-484d-8141-7d62c394fd0f )\n\n3. In the `Retrieval` section, you can configure [knowledge base](./knowledge-base.mdx) as the knowledge source and related retrieval parameters.\n\n    ![\"Chat Engine Configuration Page - Retrieval Section\"](https://github.com/user-attachments/assets/ed3f3320-a623-4ebb-a10e-d3bee264f20f)\n\n\n4. You can also change the prompt to customize the chat experience for your users. The prompt is the message that the chatbot sends to the user to start the conversation.\n\n    ![\"Chat Engine Configuration Page - Prompt Section\"](https://github.com/user-attachments/assets/21efccf0-093b-4243-87c8-159ef5975e3c)\n\n5. Click the `Create Chat Engine` button to finish the configuration.\n\n6. Now you can chat with LLM through the chat engine in the chat interface.\n\n    ![\"Chat Engine Chat Interface\"](https://github.com/user-attachments/assets/cf8766f5-889c-4a05-8841-a5f6fa72845e)\n\n"
  },
  {
    "path": "docs/src/content/deploy-with-docker.mdx",
    "content": "# Deploy with Docker Compose\n\nimport { Callout } from 'nextra/components'\n\nThis document provides instructions for deploying the entire Autoflow application using Docker Compose.\n\n## Prerequisites\n\n- Set up a TiDB cluster, you can use either:\n  - [TiDB Cloud Serverless](https://docs.pingcap.com/tidbcloud/tidb-cloud-quickstart)(recommended)\n  - [TiDB Self-Managed](https://docs.pingcap.com/tidb/stable/production-deployment-using-tiup)(>=v8.4).\n- Install [Docker Compose](https://docs.docker.com/compose/install/).\n\n## Deploy\n\n1. Clone the repository:\n\n    ```bash\n    git clone https://github.com/pingcap/autoflow.git;\n    cd autoflow/;\n    ```\n\n2. Copy and edit the `.env` file:\n\n    ```bash\n    cp .env.example .env\n    vim .env # or use another text editor to edit this file\n    ```\n\n    Replace the following placeholders with your own values:\n    - `SECRET_KEY`: you can generate a random secret key using:\n      ```bash\n      python3 -c \"import secrets; print(secrets.token_urlsafe(32))\"\n      ```\n    - `TIDB_HOST`, `TIDB_USER`, `TIDB_PASSWORD` and `TIDB_DATABASE`: you can get these values from the TiDB cluster you set up before.\n      - Note: if you are using a self-managed TiDB cluster, you need to set `TIDB_SSL=false`.\n    - `EMBEDDING_MAX_TOKENS`: set it according to the embedding model you choose before; it cannot be changed after the deployment. (We will remove it in the future, and move it to the admin panel)\n\n3. Migrate the database schema:\n\n    ```bash\n    docker compose run backend /bin/sh -c \"alembic upgrade head\"\n    ```\n\n4. 
Bootstrap the database with initial data:\n\n    ```bash\n    # Use default admin credentials (admin@example.com with random password)\n    docker compose run backend /bin/sh -c \"python bootstrap.py\"\n\n    # Or specify a custom admin email\n    docker compose run backend /bin/sh -c \"python bootstrap.py --email new-admin@example.com\"\n    ```\n\n    Running the bootstrap script creates an admin user. You can find the username and password in the output.\n\n    ```bash\n    # Reset admin password (random generated)\n    docker compose run backend /bin/sh -c \"python bootstrap.py -r\"\n\n    # Or specify a new password\n    docker compose run backend /bin/sh -c \"python bootstrap.py -r --password <new_password>\"\n    ```\n\n5. Start the services:\n\n    If you are using a SaaS embedding model, start the services with the following command:\n\n    ```bash\n    docker compose up\n    ```\n\n    If you want to use the built-in local embedding reranker, start the services with the following command:\n\n    ```bash\n    docker compose --profile local-embedding-reranker up\n    ```\n\n6. Done! Now you can open your browser and visit [http://localhost:3000](http://localhost:3000) locally\n\n## Configuration\n\nAfter you deploy the application, you need to initialize the application by following [quick start guide](./quick-start.mdx).\n\n1. Set up the default [LLM model](./llm.mdx) in the **Models > LLMs** page.\n\n2. Set up the default [Embedding model](./embedding-model.mdx) in the **Models > Embedding Models** page.\n\n3. Add a new [Knowledge Base](./knowledge-base.mdx) in the **Knowledge Bases** page.\n\n4. Configure default [Chat Engine](./chat-engine.mdx) and set up the new knowledge base as the retrieval database.\n\n## Upgrade\n\nThis section will help you upgrade pingcap/autoflow to the new version.\n\nSuppose you want to upgrade pingcap/autoflow from 0.3.0 to 0.3.1. Follow these steps:\n\n1. 
Edit your docker-compose.yml file to use the new image version.\n\n    ```yaml\n    services:\n      backend:\n        image: tidbai/backend:0.3.1\n      frontend:\n        image: tidbai/frontend:0.3.1\n      background:\n        image: tidbai/backend:0.3.1\n    ```\n\n2. Pull the new image:\n\n    ```bash\n    docker compose pull\n    ```\n\n3. Migrate the database schema:\n\n    ```bash\n    docker compose run backend /bin/sh -c \"alembic upgrade head\"\n    ```\n\n4. Recreate the docker containers:\n\n    ```bash\n    docker compose up -d --force-recreate\n    ```\n\n5. Check the logs to ensure everything is working correctly:\n\n    ```bash\n    docker compose logs -f\n    ```\n\n6. Done!\n"
  },
  {
    "path": "docs/src/content/embedding-model.mdx",
    "content": "# Embedding Model\n\nimport { Callout } from 'nextra/components'\n\nThe Embedding Model converts given input into numerical vectors (embeddings) that represent the semantic meaning of the input text.\n\nIn Autoflow, we use the Embedding Model to vectorize documents and store them in TiDB. This enables us to leverage TiDB's Vector Search capability to retrieve relevant documents for user queries.\n\n## Configure Embedding Model\n\nAfter logging in with an admin account, you can configure the Embedding Model in the admin panel.\n\n1. Click on the `Models > Embedding Models` tab;\n2. Click the `New Embedding Model` button, select your preferred embedding model provider, and configure the model parameters.\n\n    ![Add Embedding Model](https://github.com/user-attachments/assets/70c9f8d7-0e6a-46e7-909f-03f94062d5e2)\n\n## Supported Providers\n\nCurrently Autoflow supports the following embedding model providers:\n\n### OpenAI\n\nOpenAI provides a variety of Embedding Models, we recommend using the OpenAI `text-embedding-3-small` model due to its performance and compatibility with Autoflow.\n\n**Supported Models**:\n\n| Embedding Model          | Vector Dimensions | Max Tokens |\n| ------------------------ | ----------------- | ---------- |\n| `text-embedding-3-small` | 1536              | 8191       |\n\n\nFor more information, see the [OpenAI Embedding Models documentation](https://platform.openai.com/docs/guides/embeddings#embedding-models).\n\n### OpenAI-Like\n\nAutoflow also supports embedding model providers (such as [ZhipuAI](#zhipuai)) that conform to the OpenAI API specification.\n\nYou can also use models deployed on local AI model platforms (such as [vLLM](#vllm) and [Xinference](https://inference.readthedocs.io/en/latest/index.html)) that conform to the OpenAI API specification in Autoflow.\n\nTo use OpenAI-Like embedding model providers, you need to provide the **base URL** of the embedding API as the following JSON format in **Advanced 
Settings**:\n\n```json\n{\n    \"api_base\": \"{api_base_url}\"\n}\n```\n\n#### ZhipuAI BigModel\n\nFor example, the embedding API endpoint for ZhipuAI is:\n\n`https://open.bigmodel.cn/api/paas/v4/embeddings`\n\nYou need to set up the base URL in the **Advanced Settings** as follows:\n\n```json\n{\n    \"api_base\": \"https://open.bigmodel.cn/api/paas/v4/\"\n}\n```\n\n**Supported Models**:\n\n| Embedding Model | Vector Dimensions | Max Tokens |\n| --------------- | ----------------- | ---------- |\n| `embedding-3`   | 2048              | 8192       |\n\nFor more information, see the [ZhipuAI embedding models documentation](https://open.bigmodel.cn/dev/api/vector/embedding-3).\n\n#### vLLM\n\nWhen serving locally, the default embedding API endpoint for vLLM is:\n\n`http://localhost:8000/v1/embeddings`\n\nYou need to set up the base URL in the **Advanced Settings** as follows:\n\n```json\n{\n    \"api_base\": \"http://localhost:8000/v1/\"\n}\n```\n\nFor more information, see the [vLLM documentation](https://docs.vllm.ai/en/stable/).\n\n### JinaAI\n\nJinaAI provides multimodal multilingual long-context Embedding Models for RAG applications.\n\n**Supported Models**:\n\n| Embedding Model      | Vector Dimensions | Max Tokens |\n| -------------------- | ----------------- | ---------- |\n| `jina-clip-v1`       | 768               | 8192       |\n| `jina-embeddings-v3` | 1024              | 8192       |\n\nFor more information, see the [JinaAI embedding models documentation](https://jina.ai/embeddings/).\n\n### Cohere\n\nCohere provides industry-leading large language models (LLMs) and RAG capabilities tailored to meet the needs of enterprise use cases that solve real-world problems.\n\n**Supported Models**:\n\n| Embedding Model           | Vector Dimensions | Max Tokens |\n| ------------------------- | ----------------- | ---------- |\n| `embed-multilingual-v3.0` | 1024              | 512        |\n\nFor more information, see the [Cohere Embed 
documentation](https://docs.cohere.com/docs/cohere-embed).\n\n### Amazon Bedrock\n\nAmazon Bedrock is a fully managed foundation models service that provides a range of large language models and embedding models.\n\n**Featured Models**:\n\n| Embedding Model                 | Vector Dimensions | Max Tokens |\n| ------------------------------- | ----------------- | ---------- |\n| `amazon.titan-embed-text-v2:0`  | 1024              | 8192       |\n| `amazon.titan-embed-text-v1`    | 1536              | 8192       |\n| `amazon.titan-embed-g1-text-02` | 1536              | 8192       |\n| `cohere.embed-english-v3`       | 1024              | 512        |\n| `cohere.embed-multilingual-v3`  | 1024              | 512        |\n\nTo check all embedding models supported by Bedrock, go to [Bedrock console](https://console.aws.amazon.com/bedrock).\n\nTo use Amazon Bedrock, you'll need to provide a JSON Object of your AWS Credentials, as described in the [AWS CLI config global settings](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global):\n\n```json\n{\n    \"aws_access_key_id\": \"****\",\n    \"aws_secret_access_key\": \"****\",\n    \"aws_region_name\": \"us-west-2\"\n}\n```\n\nFor more information, see the [Amazon Bedrock documentation](https://docs.aws.amazon.com/bedrock/).\n\n### Ollama\n\nOllama is a lightweight framework for building and running large language models and embedding models locally.\n\n**Supported Models**:\n\n| Embedding Model    | Vector Dimensions | Max Tokens |\n| ------------------ | ----------------- | ---------- |\n| `nomic-embed-text` | 768               | 8192       |\n| `bge-m3`           | 1024              | 8192       |\n\nTo use Ollama, you'll need to configure the API base URL in the **Advanced Settings**:\n\n```json\n{\n    \"base_url\": \"http://localhost:11434\"\n}\n```\n\nFor more information, see the [Ollama embedding models 
documentation](https://ollama.com/blog/embedding-models).\n\n### Gitee AI\n\nGitee AI is a third-party model provider that offers ready-to-use cutting-edge model APIs for AI developers.\n\n**Supported Models**:\n\n| Embedding Model     | Vector Dimensions | Max Tokens |\n| ------------------- | ----------------- | ---------- |\n| `bge-m3`            | 1024              | 8192       |\n| `bge-large-zh-v1.5` | 1024              | 512        |\n| `bge-small-zh-v1.5` | 512               | 512        |\n\nFor more information, see the [Gitee AI embedding models documentation](https://ai.gitee.com/docs/openapi/v1#tag/%E7%89%B9%E5%BE%81%E6%8A%BD%E5%8F%96/POST/embeddings).\n\n### Azure OpenAI\n\nAzure OpenAI is a cloud-based AI service that provides a OpenAI-like API on Azure.\n\n**Supported Models**:\n\n| Embedding Model          | Vector Dimensions | Max Tokens |\n| ------------------------ | ----------------- | ---------- |\n| `text-embedding-3-small` | 1536              | 8191       |\n\nFor more information, see:\n\n- [Azure OpenAI documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference)\n- [Create and deploy an Azure OpenAI Service resource](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal)\n\nAfter creating the Azure OpenAI Service resource, you can configure the API base URL in the **Advanced Settings**:\n\n```json\n{\n  \"azure_endpoint\": \"https://<your-resource-name>.openai.azure.com/\",\n  \"api_version\": \"<your-api-version>\"\n}\n```\n\nYou can find those parameters in the [Deployment Tab](https://ai.azure.com/resource/deployments) of your Azure OpenAI Service resource.\n\n![Azure OpenAI Service Deployment Tab - Embedding](https://github.com/user-attachments/assets/af8ab2b7-0f2f-40ef-86ee-124bad2d9598)\n\n\n### Local Embedding Server\n\nAutoflow's local embedding server is a self-hosted embedding service built upon [sentence-transformers](https://www.sentence-transformers.org/) 
and deployed on your own infrastructure.\n\nYou can choose from a variety of pre-trained models from [Hugging Face](https://huggingface.co/models), such as:\n\n| Embedding Model | Vector Dimensions | Max Tokens |\n| --------------- | ----------------- | ---------- |\n| `BAAI/bge-m3`   | 1024              | 8192       |\n\nTo configure the Local Embedding Service, set the API URL in the **Advanced Settings**:\n\n```json\n{\n    \"api_url\": \"http://local-embedding-reranker:5001/api/v1/embedding\"\n}\n```\n"
  },
  {
    "path": "docs/src/content/evaluation.mdx",
    "content": "# Evaluation (beta)\n\nThe **Evaluation** module is an integral part of the Chat Engine of the AutoFlow, designed to assess the performance and reliability of the Chat Engine's outputs.\n\nCurrently, the module provides evaluations based on two key metrics:\n\n1. **Factual Correctness**: This metric measures the degree to which the generated responses align with verified facts. It ensures that the Chat Engine delivers accurate and trustworthy information.\n\n2. **Semantic Similarity**: This metric evaluates the closeness in meaning between the generated responses and the expected outputs. It helps gauge the contextual relevance and coherence of the Chat Engine's performance.\n\nWith these metrics, the Evaluation component empowers developers and users to analyze and optimize the Chat Engine's capabilities effectively.\n\n## Prerequisites\n\n- An admin account to access the Evaluation panel.\n- (Optional) A CSV dataset with at least two columns:\n    - `query`: i.e. question.\n    - `reference`: i.e. expected answer.\n\n## How to Evaluate\n\nTo evaluate the Chat Engine, follow these steps:\n\n1. Create an evaluation dataset:\n\n    1. Click on the **Evaluation** in the left panel, and then click the **Datasets** button.\n\n        ![\"Evaluation - Datasets\"](https://github.com/user-attachments/assets/42c900e3-da9d-4891-a064-50ddf4af21e3 )\n\n    2. Click on the **New Evaluation Dataset** button.\n    3. Type in the dataset name, and if you have a CSV file with the required columns, you can upload it to initial the evaluation dataset.\n\n        ![\"Evaluation - New Evaluation Dataset\"](https://github.com/user-attachments/assets/f5c6d454-04a9-4108-8072-0abedb879b66 )\n\n    4. Click on the **Create** button.\n\n2. Create an evaluation task:\n\n    1. Click on the **Evaluation** in the left panel, and then click the **Tasks** button.\n    2. Click on the **New Evaluation Task** button.\n    3. 
Type in the task name, select the evaluation dataset, select the evaluation targeting Chat Engine, and type in the run size.\n\n        > **Note:**\n        >\n        > The **Run Size** is a parameter that can cut your dataset into smaller amount to evaluation task.\n        >\n        > - For example, your dataset has 1000 rows, and you set the run size to 100, then the evaluation task will only evaluate the first 100 rows.\n        > - Run size cannot change the evaluation dataset, it only changes the amount of data to evaluation task.\n\n        ![\"Evaluation - New Evaluation Task\"](https://github.com/user-attachments/assets/b8030ae5-0284-4255-a5b5-d55b00c294ed )\n\n    4. Click on the **Create** button.\n\n3. Waiting for the evaluation task to finish, and you can see the evaluation result in the task detail.\n\n    1. Click on the **Evaluation** in the left panel, and then click the **Tasks** button.\n    2. Click on the **Name** of the task you want to see the result.\n    3. Make your insight from the evaluation result.\n\n        ![\"Evaluation - Task Detail\"](https://github.com/user-attachments/assets/21f9f366-dab7-4904-9693-e95c032fb441 )\n"
  },
  {
    "path": "docs/src/content/faq.mdx",
    "content": "## FAQ of this RAG app\n\nThe FAQ is a collection of useful questions and answers about the\nproject. If you have a question that isn't answered here, please\n[open a discussion](https://github.com/pingcap/autoflow/discussions).\n\nexport function FAQBox({ title, children }) {\n  return (\n    <details\n      close=\"true\"\n      className=\"last-of-type:mb-0 rounded-lg p-2 mt-4\"\n    >\n      <summary>\n        <strong className=\"text-lg\">{title}</strong>\n      </summary>\n      <div className=\"nx-p-2\">{children}</div>\n    </details>\n  )\n}\n\n\n<FAQBox title=\"What's the architecture or workflow of this Graph RAG app?\">\n\n![Graph RAG app architecture](https://github.com/user-attachments/assets/a21e1bb7-3a9f-49ed-aa6a-de8be0ada17f)\n\n</FAQBox>\n\n<FAQBox title=\"Is this RAG app free?\">\n- [Free] The code of this RAG app pingcap/autoflow is open source, it's free to use;\n- [Free] The database of this RAG app is free for **25GiB storage** as we use [TiDB Cloud Serverless](https://www.pingcap.com/tidb-cloud-serverless/?utm_source=tidb.ai&utm_medium=community);\n- [Paid] **But** the token of OpenAI is paid, you should have an account on https://platform.openai.com for an API token.\n</FAQBox>\n\n<FAQBox title=\"Which LLMs are supported/tested by this RAG app?\">\n  AutoFlow uses LLM to extract knowledge graph from docs and generate responses to user queries, so the LLM is the core of this RAG app; the smarter the LLM, the better the performance.\n  Currently the online demo(https://tidb.ai) is using OpenAI as chat model by default, and the following LLMs are supported/tested by this RAG app:\n\n- [gpt-4/gpt-4o/gpt-4o-mini/o1/o1-mini](https://platform.openai.com/docs/models)\n- [gemini-1.5-pro/gemini-2.0-flash](https://gemini.google.com/)\n- [claude-3.5-sonnet](https://claude.ai/)\n- [glm-4-plus](https://bigmodel.cn/)\n- ...\n\n</FAQBox>\n\n<FAQBox title=\"Can I use a different embedding/reranker API?\">\n  Yes\n\n- [Change Default 
Reranker](/docs/reranker-model)\n- [Change Default Embedding API](/docs/embedding-model)\n\n</FAQBox>\n\n<FAQBox title=\"Why use TiDB which is a MySQL compatible database to store Knowledge Graph?\">\n  - TiDB is a distributed MySQL SQL database, it's easy to scale out and has a good performance;\n  - TiDB is a multi-model and all-in-one database, it supports MySQL SQL, JSON, Vector, and more, which can replace multiple databases;\n  - The performance of TiDB is enough to store and query the Knowledge Graph, as we just search the first-level or second-level neighbors of the node.\n</FAQBox>"
  },
  {
    "path": "docs/src/content/index.mdx",
    "content": "import ReadmeContent from \"./README.md\";\n\n<ReadmeContent />\n"
  },
  {
    "path": "docs/src/content/javascript.mdx",
    "content": "# Embeddable JavaScript Snippet\n\nWhat if you could integrate a conversational search window into your website to facilitate instant responses to product-related queries? With the embeddable JavaScript snippet, you can do just that. This feature allows you to copy and embed a simple JavaScript code snippet into your website, typically placed at the bottom right corner of your site.\n\n## Pre-requisites\n\nIf you want to embed the JavaScript snippet into your other website, you need to enable it in `.env` file. \n\n* Step 1: Enable the JavaScript Widget\n\n```bash\n# JS Widgets: if you want to use JS widgets, you need to set the following variables to enable CORS.\nBACKEND_CORS_ORIGINS=https://your-domain.com\n```\n\n* Step 2: Restart your service\n\n## How to Embed\n\nTo embed the JavaScript snippet, follow these steps:\n\n1. Enter Settings > JavaScript, and copy the JavaScript code snippet like this:\n\n![embeddable-javascript-snippet](https://github.com/user-attachments/assets/5322dc31-d4d5-492f-87dd-dd976c245ac3)\n\n2. Paste the JavaScript code snippet into your website's HTML file;\n\n3. Initialize the JavaScript snippet with input parameters like `Widget Title`, `Site Name`, `Search Titles`, `Example Questions`, `Footer Links`, and `Social Media Links`;\n\n4. Restart your website to see the conversational search window appear at the bottom right corner of your site.\n\n![embeddable-javascript-snippet](https://github.com/user-attachments/assets/f0dc82db-c14d-4863-a242-c7da3a719568 \"Image Title\")\n\nThat's all you need to do to integrate our conversational search window into your website. Enjoy! You can also go to https://tidb.ai and https://docs.pingcap.com to see the chat window in action.\n"
  },
  {
    "path": "docs/src/content/knowledge-base.mdx",
    "content": "import { Callout } from 'nextra/components'\n\n# Knowledge Base\n\nKnowledge base is a collection of documents and represents a certain knowledge domain, which can be used by LLM to answer users' questions.\n\n## Configure Knowledge Base\n\nAfter logging in with an admin account, you can configure the Knowledge Base in the admin panel.\n\n1. Click on the `Knowledge Bases` tab;\n2. Click on the `New Knowledge Base` button to add a new knowledge base;\n\n    ![Knowledge Base Creation](https://github.com/user-attachments/assets/61a332b0-b1de-42f2-a107-6b08c743b12d)\n\n    Set up the following fields and click `Create` to create the knowledge base.\n\n    | Field            | Description                                                                          | Required | Updatable |\n    | ---------------- | ------------------------------------------------------------------------------------ | -------- | --------- |\n    | Name             | The name of the knowledge base.                                                      | Yes      | Yes       |\n    | Description      | Describe the knowledge base.                                                         | No       | Yes       |\n    | LLM              | The LLM used during building vector index and knowledge graph index.                 | Yes      | No        |\n    | Embedding Model  | the embedding model used by vector index and knowledge graph index.                  | Yes      | No        |\n    | Indexing Methods | Vector index is forced to be enabled. The knowledge graph index is used by GraphRAG. | Yes      | No        |\n\n\n3. Click the `Data Sources` tab and add a new data source.\n\n4. Once the data source added, Autoflow will asynchronously import and index documents from the data source, you can check the progress in the `Index Process` tab.\n\n5. 
Go to `Chat Engine` configuration page, select the knowledge base you created and click `Save` to enable it.\n\n    ![Chat Engine Configuration](https://github.com/user-attachments/assets/2572dc02-ce77-4d2f-a4ba-68bc6858d44c)\n\n\n## Data Source Management\n\nEach Knowledge Base could contain multiple data sources.\n\nYou can click the **Data Sources** tab to view all data sources.\n\n![Data Sources Page](https://github.com/user-attachments/assets/dc865796-3660-4e25-9218-9ad4efd0fb97)\n\n### Create Data Source\n\nYou can click the button on top of data sources list to create new data sources.\n\nCurrently, AutoFlow supports three types of data sources:\n\n- File uploads from local:\n  - Markdown (\\*.md)\n  - PDF (\\*.pdf)\n  - Microsoft Word (\\*.docx)\n  - Microsoft PowerPoint (\\*.pptx)\n  - Microsoft Excel (\\*.xlsx)\n  - Text (\\*.txt) files\n- Website pages by sitemap.xml\n- Certain web pages by URL\n\n### Update Data source\n\nYou can rename data source by click the **Configure** button of data source.\n\n### Delete Data source\n\nYou can delete data source by click the **Delete** button of data source.\n\n<Callout>\n  **This operation cannot be undone**.\n\n  Deleting data sources will trigger an **asynchronous** deletion process for related **documents**, **chunks of vector index**\n  and **entities and relationships of knowledge graph index**.\n</Callout>\n\n## Documents Management\n\nYou can manage documents in the **Documents** tab.\n\n![Documents Page](https://github.com/user-attachments/assets/878d2809-97a6-4a87-8d3a-3481f8bb863b)\n\n#### Delete Documents\n\nScroll horizontally to the end of the documents table, click the `...` button and click the `Delete` item on the dropdown menu.\n\n<Callout>\n  **This operation cannot be undone**.\n\n  Deleting documents will trigger an **asynchronous** deletion process for related **chunks of vector index**\n  and **entities and relationships of knowledge graph index**.\n</Callout>\n"
  },
  {
    "path": "docs/src/content/llm.mdx",
    "content": "# LLM - Large Language Model\n\nIn this app, LLM is used for several purposes:\n1. Extracting knowledge from docs;\n2. Generating responses to user queries.\n\n## Configure LLM\n\nAfter logging in with an admin account, you can configure the LLM in the admin panel.\n\n1. Click on the `Models > LLMs` tab;\n2. Click on the `New LLM` button to add a new LLM;\n\n    ![llm-config](https://github.com/user-attachments/assets/993eec34-a99a-4acf-b4b7-a4ee8e28e3d5 \"LLM Config\")\n\n3. Input your LLM information and click `Create LLM` button;\n4. Done!\n\nimport { Callout } from 'nextra/components'\n\n<Callout>\nIf you want to use the new LLM while answering user queries, you need switch to `Chat Engines` tab and set the new LLM as LLM.\n</Callout>\n\n## Supported LLM providers\n\nCurrently Autoflow supports the following LLM providers:\n\n### Amazon Bedrock\n\nTo use Amazon Bedrock, you'll need to provide a JSON Object of your AWS Credentials, as described in the [AWS CLI config global settings](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global):\n\n```json\n{\n    \"aws_access_key_id\": \"****\",\n    \"aws_secret_access_key\": \"****\",\n    \"aws_region_name\": \"us-west-2\"\n}\n```\n\nTo learn more about Amazon Bedrock, please visit [Amazon Bedrock](https://aws.amazon.com/bedrock/).\n\n### Google Gemini\n\nTo learn more about Google Gemini, please visit [Google Gemini](https://gemini.google.com/).\n\n### Google Vertex AI\n\nTo learn more about Vertex AI, please visit [Vertex AI](https://cloud.google.com/vertex-ai).\n\n### Gitee AI\n\nFollow the UI to configure the Gitee AI provider. 
To learn more about Gitee AI, please visit [Gitee AI](https://ai.gitee.com/serverless-api).\n\n### OpenAI\n\nTo learn more about OpenAI, please visit [OpenAI](https://platform.openai.com/).\n\n### OpenAI-Like\n\nAutoflow also support the providers that conform to the OpenAI API specification.\n\nTo use OpenAI-Like LLM providers, you need to provide the **api_base** of the LLM API as the following JSON format in **Advanced Settings**:\n\n```json\n{\n    \"api_base\": \"{api_base_url}\"\n}\n```\n\n#### OpenRouter\n\nDefault config:\n\n```json\n{\n   \"api_base\": \"https://openrouter.ai/api/v1/\"\n}\n```\n\nTo learn more about OpenRouter, please visit [OpenRouter](https://openrouter.ai/).\n\n{/*\n#### ZhipuAI BigModel\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"https://open.bigmodel.cn/api/paas/v4/\",\n    \"is_chat_model\": true\n}\n```\n\nTo learn more about BigModel, please visit [BigModel](https://open.bigmodel.cn/). \n*/}\n\n#### Ollama\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"http://localhost:11434\"\n}\n```\n\nTo learn more about Ollama, please visit [Ollama](https://ollama.com/).\n\n#### vLLM\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"http://localhost:8000/v1/\"\n}\n```\n\nTo learn more about vLLM, please visit [vLLM](https://docs.vllm.ai/en/stable/).\n\n#### Xinference\n\nIf you assigned a model uid different from the model name, you need to fill in **model uid** in the box `model`.\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"http://localhost:9997/v1/\"\n}\n```\n\nTo learn more about Xinference, please visit [Xinference](https://inference.readthedocs.io/en/latest/).\n\n\n#### Azure OpenAI\n\nTo learn more about Azure OpenAI, please visit:\n\n- [Azure OpenAI documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference)\n- [Create and deploy an Azure OpenAI Service resource](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal)\n\nAfter creating 
the Azure OpenAI Service resource, you can configure the API base URL in the **Advanced Settings**:\n\n```json\n{\n  \"azure_endpoint\": \"https://<your-resource-name>.openai.azure.com/\",\n  \"api_version\": \"<your-api-version>\",\n  \"engine\": \"<your-deployment-name>\"\n}\n```\n\nYou can find those parameters in the [Deployment Tab](https://ai.azure.com/resource/deployments) of your Azure OpenAI Service resource.\n\n<Callout>\nDo not mix `Model version` and `API version` up, they are different.\n</Callout>\n\n![Azure OpenAI Service Deployment Tab - LLM](https://github.com/user-attachments/assets/158f845c-5f38-40d7-b66a-8528d7df178e)\n\n#### Novita AI\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"https://api.novita.ai/v3/openai\"\n}\n```\n\nTo learn more about Novita AI, please visit [Novita AI](https://novita.ai/).\n\n#### DeepSeek\n\nDeepSeek provides chat model `deepseek-chat`.\n\nDefault config:\n\n```json\n{\n    \"api_base\": \"https://api.deepseek.com/v1\",\n    \"is_chat_model\": true\n}\n```\n\nTo learn more about DeepSeek, please visit [DeepSeek](https://www.deepseek.com/).\n"
  },
  {
    "path": "docs/src/content/quick-start.mdx",
    "content": "# Quick Start\n\nHere is a documentation page that shows how to set up a tool like https://tidb.ai from deployment to usage. \n\n## Step 1: Deployment\n\nYou can deploy self-hosted Autoflow on your server with Docker Compose.\n\n[Read the deployment guide](./deploy-with-docker)\n\n\n## Step 2: Configure\n\nAfter deployment, you need to log in to the admin dashboard to configure the tool with your own settings.\n\n### Configure the LLM - Large Language Model\n\nGo to the **Models > LLMs** page to [configure the LLM model](./llm).\n\n> The LLM is used for extracting knowledge from docs and generating responses. You can change the default LLM to another one.\n\n\n![Set up LLM model](https://github.com/user-attachments/assets/c343c1bb-1c82-4fab-a3b9-72987d271a45)\n\n### Configure the Embedding Model\n\nGo to the **Models > Embedding Models** page to [configure the embedding model](./embedding-model).\n\n> The Embedding Model is a machine learning model that is trained to generate embeddings for a given input. We must translate text to vectors with this model before inserting vectors into the database.\n\n![Set up Embedding model](https://github.com/user-attachments/assets/2d78b771-d759-481c-a2ef-92333281ff1e)\n\n### Configure the Reranker [Optional]\n\n> The Reranker is an essential tool that optimizes the order of results from initial searches. 
It is optional but recommended.\n\nGo to the **Models > Rerankers** page to configure [the reranker model](./reranker-model).\n\n![Set up Reranker](https://github.com/user-attachments/assets/96d187f2-23f6-49fd-a2bb-7c241a438b07)\n\n\n## Step 3: Add a New Knowledge Base and Upload Documents\n\nGo to the **Knowledge Base** page to add a new knowledge base and upload documents.\n\n![Add Knowledge Base](https://github.com/user-attachments/assets/f78be4ac-0211-48bf-9706-bb36240414cd)\n\nAfter adding a new knowledge base, you can upload your documents from local or crawl from the web in the **Data Source** subpage.\n\n![Add Data Source to Knowledge Base](https://github.com/user-attachments/assets/506db914-d73a-4625-a119-461fdb73ba8e)\n\n> After adding data source, there will be a period of time for indexing the data.\n\nFor more details, please refer to [Knowledge Base](./knowledge-base) documentation.\n\n## Step 4: Set up the Chat Engine\n\nGo to the **Chat Engines** page to [set up the chat engine](./chat-engine).\n\n> The chat engine is used to chat with users.\n\n![Set up Chat Engine](https://github.com/user-attachments/assets/2572dc02-ce77-4d2f-a4ba-68bc6858d44c)\n\n\n## Step 5: Usage\n\nAfter deployment, configuration and uploading documents, you can use the tool to chat with users to answer their questions.\n\npingcap/autoflow provides several features to help you chat with users:\n\n1. Out-of-the-box chat interface, e.g. https://tidb.ai\n2. API to chat with users programmatically, e.g. https://tidb.ai/api-docs\n3. Embeddable chat widget to integrate with your website"
  },
  {
    "path": "docs/src/content/releases/_meta.ts",
    "content": "import type { Meta } from 'nextra';\n\nexport default {\n  \"v0.4.0\": \"v0.4.0\",\n  \"v0.3.0\": \"v0.3.0\",\n  \"v0.2.0\": \"v0.2.0\",\n  \"v0.1.0\": \"v0.1.0\",\n} satisfies Meta\n"
  },
  {
    "path": "docs/src/content/releases/index.mdx",
    "content": "---\ntitle: Releases\nasIndexPage: true\n---\n\n# Releases\n\nimport { Cards } from 'nextra/components'\n \n<Cards>\n  <Cards.Card\n    title=\"v0.4.0\"\n    href=\"/releases/v0.4.0\"\n  />\n  <Cards.Card\n    title=\"v0.3.0\"\n    href=\"/releases/v0.3.0\"\n  />\n  <Cards.Card\n    title=\"v0.2.0\"\n    href=\"/releases/v0.2.0\"\n  />\n  <Cards.Card\n    title=\"v0.1.0\"\n    href=\"/releases/v0.1.0\"\n  />\n</Cards>"
  },
  {
    "path": "docs/src/content/releases/v0.1.0.md",
    "content": "# Release Notes for v0.1.0\n\n## Overview\n\nThis is the first release of TiDB.AI.\n\n## New Features\n\n- LLM Model Support\n    - OpenAI\n    - Gemini\n    - OpenAI-Like\n        - openrouter\n        - zhipuai\n        - ...\n    - AnthropicVertex\n    - Bedrock\n- Rerank Model Support\n    - Jina\n    - Cohere\n- Embedding Model Support\n    - OpenAI\n- Data Source Support\n    - Website\n        - Single URL\n        - Sitemap\n    - Local file\n        - .pdf\n        - .md\n        - .txt\n\n## Known Issues\n\nFor a list of known issues, please visit our [GitHub Issues page](https://github.com/pingcap/autoflow/issues). If you encounter any problems, we encourage you to report them.\n"
  },
  {
    "path": "docs/src/content/releases/v0.2.0.md",
    "content": "# Release Notes for v0.2.0\n\n## Improvements\n\n- Added an end-to-end test flow after releasing new versions to enhance system robustness.\n- Optimized the UI during answer retrieval to provide better feedback on current processes.\n- Support for connecting to Self-Managed TiDB clusters that also have vector search capabilities.\n- Integrated support for the BaiSheng reranker.\n\n## Bug Fixes\n\n- Fixed an issue when creating synopsis entity nodes.\n- Fix the delete button in LLM/Reranker list page.\n- Fixed a SiteSetting cache issue that prevented proper synchronization across processes.\n\n\nIf you are deploying tidb.ai using docker, please refer to this [document](https://autoflow.tidb.ai/deploy-with-docker#upgrade) for upgrading your tidb.ai.\n"
  },
  {
    "path": "docs/src/content/releases/v0.3.0.md",
    "content": "# Release Notes for v0.3.0\n\n## Highlights\n\n- Rename project to `autoflow`\n- Multiple Knowledge Bases support\n- Support new LLM providers\n  - [OpenRouter](../llm.mdx#openrouter)\n  - [ZhipuAI BigModel](../llm.mdx#zhipuai-bigmodel)\n  - [Ollama](../llm.mdx#ollama)\n- Support new embedding models providers\n  - [Ollama](../embedding-model.mdx#ollama)\n  - Support [OpenAI Like](../embedding-model.mdx#openai-like) embedding model providers\n    - [ZhipuAI BigModel](../embedding-model.mdx#zhipuai-bigmodel)\n\n## Breaking Changes\n\n- [Knowledge Base](/docs/knowledge-base)\n    - Deprecate old datasource management. [Manual migration](#manual-migration) is required.\n    - Support different Embedding Models for each KB\n    - Individual vector index and knowledge graph index for each KB\n    - Move vector search and graph search API to knowledge base level\n    - Move index progress and error retries to knowledge base\n\n## Improvements\n\n- Support create chat engine from default config\n\n\n## Manual Migration\n\n### Migrating from v0.2.x to v0.3.0\n\nIn previous versions, there was no concept of Knowledge Base, and Autoflow stored index data in the following tables:\n\n- `chunks`\n- `entities`\n- `relationships`\n\nIn the v0.3.0, index data will be stored separately in tables corresponding to each knowledge base: \n\n- `chunks_{knowledge_base_id}`\n- `entities_{knowledge_base_id}`\n- `relationships_{knowledge_base_id}`\n\nAfter upgrading to v0.3.0, the data in the original tables will be **preserved**. If you want to migrate the old data to the new knowledge base, you can follow the steps below:\n\n1. Follow the [Upgrade](../deploy-with-docker.mdx#upgrade) guide to upgrade the application to v0.3.0.\n2. Create a new knowledge base in the admin panel.\n3. Connect to your database using TiDB Serverless Web Console or using mysql client.\n4. Obtain the new knowledge base ID:\n\n    ```sql\n    SELECT id, name FROM knowledge_bases;\n    ```\n\n5. 
Replace the `{knowledge_base_id}` in the following SQL scripts with the new knowledge base ID and execute them:\n\n    ```sql\n    BEGIN;\n\n    INSERT INTO knowledge_base_datasources (knowledge_base_id, data_source_id)\n    SELECT {knowledge_base_id}, id\n    FROM data_sources\n    WHERE id NOT IN (SELECT data_source_id FROM knowledge_base_datasources);\n\n    UPDATE documents SET knowledge_base_id = {knowledge_base_id} WHERE knowledge_base_id IS NULL;\n\n    INSERT INTO chunks_{knowledge_base_id} (id, hash, text, meta, embedding, document_id, relations, source_uri, index_status, index_result, created_at, updated_at)\n    SELECT id, hash, text, meta, embedding, document_id, relations, source_uri, index_status, index_result, created_at, updated_at\n    FROM chunks;\n\n    INSERT INTO entities_{knowledge_base_id} (id, name, description, meta, entity_type, synopsis_info, description_vec, meta_vec)\n    SELECT id, name, description, meta, entity_type, synopsis_info, description_vec, meta_vec\n    FROM entities;\n\n    INSERT INTO relationships_{knowledge_base_id} (id, description, meta, weight, source_entity_id, target_entity_id, last_modified_at, document_id, chunk_id, description_vec)\n    SELECT id, description, meta, weight, source_entity_id, target_entity_id, last_modified_at, document_id, chunk_id, description_vec\n    FROM relationships;\n\n    COMMIT;\n    ```\n"
  },
  {
    "path": "docs/src/content/releases/v0.4.0.md",
    "content": "# Release Notes for v0.4.0\n\n## Highlights\n\n- Support [Evaluation (beta)](../evaluation.mdx) tool to evaluate the performance and reliability of the Chat Engine’s outputs.\n  - Current support key metrics:\n    - Factual Correctness\n    - Semantic Similarity\n- Support new LLM providers\n  - [Gitee AI](../llm.mdx#gitee-ai)\n  - Test new OpenAI-like providers\n    - [vLLM](../llm.mdx#vllm)\n    - [Xinference](../llm.mdx#xinference)\n- Support new embedding model providers\n  - [Gitee AI](../embedding-model.mdx#gitee-ai)\n  - [Amazon Bedrock](../embedding-model.mdx#amazon-bedrock)\n\n## Improvements\n\n- Limit the upload file size via `max_upload_file_size` (10MB by default) parameter on site setting\n- Support download the reference file in the chat page\n\nIf you are deploying Autoflow using docker, please follow the [Upgrade](../deploy-with-docker.mdx#upgrade) guide to upgrade your Autoflow.\n"
  },
  {
    "path": "docs/src/content/requirements.mdx",
    "content": "# Deployment Requirements\nIn this section, we will cover the requirements for deploying the project.\n\n\n## LLM(Large Language Model) and Embedding Model\n* A saas LLM model like OpenAI API or self-hosted LLM model with requirements:\n  * Smarter than GPT-3.5\n  * Provide openai-like API\n* Embedding model: AutoFlow needs an embedding model to translate the text into vectors. You can use the following:\n  * OpenAI-like embedding model\n  * Cohere embedding model\n  * ZhipuAI embedding model\n  * You can also use the [Jina AI API](https://jina.ai/) for this purpose. It is free for 1M tokens.\n* (Optional) Reranker. You can use the [Jina AI API](https://jina.ai/) for this purpose. It is free for 1M tokens.\n\n\n## TiDB\n* With [TiDB Serverless](https://pingcap.com/ai) account, you can setup a TiDB cluster with Vector Search enabled. Free quota is available for 1M RU per month.\n* You can also use a self-hosted TiDB cluster(>v8.4) with Vector Search enabled, but please note it will require TiFlash enabled for Vector Search.\n\n\n## Hardware\n\n### If you are using a Cloud TiDB and SaaS LLM\nYou can use any of the following web hosting services to deploy the project:\n* Cloud server providers like [AWS](https://aws.amazon.com/), [Google Cloud](https://cloud.google.com/), [Azure](https://azure.microsoft.com/), etc.\n* Or your own server.\n\nWe suggest the following configuration for the server:\n\n| Name                 | Value            |\n|----------------------|------------------|\n| CPU                  | 4 vCPUs          |\n| Memory               | 8 GB RAM         |\n| Disk                 | 200 GB SSD       |\n| Number of servers    | 1                |\n\n\n### If you are using a self-hosted TiDB and self-hosted LLM\nIf you use a self-hosted TiDB and self-hosted LLM, you need a powerful server to handle the load. 
We suggest the following configuration for the server:\n\n| Name                 | Value            |\n|----------------------|------------------|\n| CPU                  | 32 vCPUs         |\n| Memory               | 64 GB RAM        |\n| Disk                 | 500 GB SSD       |\n| GPU                  | 1 x NVIDIA A100  |\n| Number of servers    | 1                |\n\nGPU here is used for the LLM model, you can use any other GPU model that can be used for the LLM model which has capability more than gpt-3.5."
  },
  {
    "path": "docs/src/content/reranker-model.mdx",
    "content": "# Configure Reranker Model\n\nIn a RAG system, a reranker is a model that re-ranks the retrieved documents based on the user's query. The reranker model is used to improve the relevance of the retrieved documents.\n\n## Configure Reranker Model\n\nAfter logging in with an admin account, you can configure the Reranker Model in the admin panel.\n\n1. Click on the `Models > Reranker Models` tab;\n2. Click on the `New Reranker Model` button to add a new Reranker;\n\n![reranker-config](https://github.com/user-attachments/assets/f31bf557-16b3-4f7e-9e70-00c9fd7a5b1b \"Reranker Config\")\n\n3. Input your Reranker information and click `Create Reranker` button;\n\n## Supported Reranker Providers\n\nCurrently AutoFlow supports the following reranker providers:\n\n### JinaAI\n\nTo learn more about JinaAI reranking, please visit [Jina AI Reranker](https://jina.ai/reranker/).\n\n### Cohere\n\nTo learn more about Cohere reranking, please visit [Cohere Rerank](https://cohere.com/rerank/).\n\n### vLLM\n\nTo use vLLM rerankers, you need to provide the **base_url** of the reranker API as the following JSON format in **Advanced Settings**:\n\n```json\n{\n    \"base_url\": \"{api_base_url}\"\n}\n```\n\nDefault config:\n\n```json\n{\n    \"base_url\": \"http://localhost:8000\"\n}\n```\n\nTo learn more about vLLM reranking, please visit [vLLM Sentence Pair Scoring Models](https://docs.vllm.ai/en/latest/models/supported_models.html#sentence-pair-scoring-task-score).\n\n### Xorbits Inference (Xinference)\n\nTo use Xinference rerankers, you need to provide the **base_url** of the reranker API as the following JSON format in **Advanced Settings**:\n\n```json\n{\n    \"base_url\": \"{api_base_url}\"\n}\n```\n\nDefault config:\n\n```json\n{\n    \"base_url\": \"http://localhost:9997\"\n}\n```\n\nTo learn more about Xinference reranking, please visit [Xinference Rerank](https://inference.readthedocs.io/en/latest/models/model_abilities/rerank.html).\n\n### Amazon Bedrock\n\nTo 
use Amazon Bedrock rerankers, you'll need to provide a JSON Object of your AWS Credentials, as described in the [AWS CLI config global settings](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-global):\n\n```json\n{\n    \"aws_access_key_id\": \"****\",\n    \"aws_secret_access_key\": \"****\",\n    \"aws_region_name\": \"us-west-2\"\n}\n```\n\nTo find more reranking models supported by Amazon Bedrock, please visit [Amazon Bedrock Models Reference](https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models-reference.html).\n"
  },
  {
    "path": "docs/src/content/resources.mdx",
    "content": "# Resources\n\nHere are some video tutorials to help you understand the basics of the platform and how to use it.\n\n## Presentation: Intro to TiDB (Graph) RAG\n> Agenda:\n> - Intro to TiDB RAG / Graph RAG\n> - How it works?\n> - Vector RAG + Graph RAG + Fulltext Search(WIP)\n> - Plan-able RAG engine - ngaut/StackVM\n> - Benefits of using TiDB for AI app storage\n> - How to copy one?\n\nGoogle Slides: [Intro to TiDB (Graph) RAG](https://docs.google.com/presentation/d/1uK9oOW3QGHiNoqYUpFRqHPN4J_FvB2qeDsmAFMKASaY/edit?usp=sharing)\n\n## Video: Intro to PingCAP/AutoFlow Post-Verification and Graph Search\n> This video will show you how to use the platform to search (from graph) for information and verify the results.\n> We introduce a new feature called Post-Verification, which allows you to verify the results and correct them if needed.\n\n<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/PsXcKA1UckU?si=gfwECMTI3GFk0HXX\" title=\"YouTube video player\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share; fullscreen\"></iframe>"
  },
  {
    "path": "docs/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"target\": \"ES2017\",\n    \"lib\": [\n      \"dom\",\n      \"dom.iterable\",\n      \"esnext\"\n    ],\n    \"allowJs\": true,\n    \"skipLibCheck\": true,\n    \"strict\": false,\n    \"noEmit\": true,\n    \"incremental\": true,\n    \"module\": \"esnext\",\n    \"esModuleInterop\": true,\n    \"moduleResolution\": \"node\",\n    \"resolveJsonModule\": true,\n    \"isolatedModules\": true,\n    \"jsx\": \"preserve\",\n    \"plugins\": [\n      {\n        \"name\": \"next\"\n      }\n    ]\n  },\n  \"include\": [\n    \"next-env.d.ts\",\n    \".next/types/**/*.ts\",\n    \"**/*.ts\",\n    \"**/*.tsx\",\n    \"**.ts\"\n  ],\n  \"exclude\": [\n    \"node_modules\"\n  ]\n}\n"
  },
  {
    "path": "e2e/.gitignore",
    "content": "node_modules/\n/test-results/\n/playwright-report/\n/blob-report/\n/playwright/.cache/\n\n.idea\n\n.env.local\nlocal-test.sh\n\n# Internal output\n.credentials\n*.stdout\n*.stderr\nscreenshots\n.vercel\n"
  },
  {
    "path": "e2e/README.md",
    "content": "# Integration Tests for TiDB.ai\n"
  },
  {
    "path": "e2e/deploy-test-result.sh",
    "content": "#!/bin/bash\n\nset -e\n\nif [[ \"${VERCEL_TOKEN}\" && \"${VERCEL_ORG_ID}\" && \"${VERCEL_PROJECT_ID}\" ]]; then\n  npx vercel deploy --yes --token \"${VERCEL_TOKEN}\" \"${VERCEL_CLI_ARGS}\"\nelse\n  echo \"\"\n  echo \"> Not configured\"\n  echo \"\"\n  exit 1\nfi\n"
  },
  {
    "path": "e2e/docker-compose.yml",
    "content": "name: tidb-ai-integration-test\n\nservices:\n  redis:\n    image: redis:6.0.16\n    restart: always\n    pull_policy: always\n    volumes:\n      - ${E2E_DATA_REDIS_DIR:-.data/redis}:/data\n\n  backend:\n    image: tidbai/backend:${E2E_DOCKER_TAG_BACKEND}\n    platform: ${E2E_DOCKER_PLATFORM}\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"5001:80\"\n    env_file:\n      - .env.backend\n    volumes:\n      - ${E2E_DATA_STORAGE_DIR:-.data/storage}:/shared/data\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n    extra_hosts:\n      host.docker.internal: host-gateway\n\n  frontend:\n    image: tidbai/frontend:${E2E_DOCKER_TAG_FRONTEND}\n    platform: ${E2E_DOCKER_PLATFORM}\n    restart: always\n    depends_on:\n      - backend\n    ports:\n      - \"3000:3000\"\n    environment:\n      BASE_URL: http://backend\n    env_file:\n      - .env.frontend\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n\n  background:\n    image: tidbai/backend:${E2E_DOCKER_TAG_BACKEND}\n    platform: ${E2E_DOCKER_PLATFORM}\n    restart: always\n    depends_on:\n      - redis\n    ports:\n      - \"5555:5555\"\n    env_file:\n      - .env.backend\n    command: /usr/bin/supervisord\n    volumes:\n      - ${E2E_DATA_STORAGE_DIR:-.data/storage}:/shared/data\n    logging:\n      driver: json-file\n      options:\n        max-size: \"50m\"\n        max-file: \"6\"\n    extra_hosts:\n      host.docker.internal: host-gateway\n\n  static-web-server:\n    image: joseluisq/static-web-server:2\n    restart: always\n    ports:\n      - \"4001:80\"\n    volumes:\n      - ./test-html:/public\n"
  },
  {
    "path": "e2e/global.setup.ts",
    "content": "import { config } from 'dotenv';\n\nexport default function () {\n  config({\n    path: '.credentials',\n  });\n}"
  },
  {
    "path": "e2e/package.json",
    "content": "{\n  \"name\": \"tidb.ai-playwright\",\n  \"version\": \"1.0.0\",\n  \"main\": \"index.js\",\n  \"keywords\": [],\n  \"author\": \"\",\n  \"license\": \"ISC\",\n  \"description\": \"\",\n  \"devDependencies\": {\n    \"@playwright/test\": \"^1.46.0\",\n    \"@types/node\": \"^22.2.0\"\n  },\n  \"dependencies\": {\n    \"dotenv\": \"^16.4.5\"\n  }\n}\n"
  },
  {
    "path": "e2e/playwright.config.ts",
    "content": "import { defineConfig, devices } from '@playwright/test';\n\n/**\n * Read environment variables from file.\n * https://github.com/motdotla/dotenv\n */\n// import dotenv from 'dotenv';\n// dotenv.config({ path: path.resolve(__dirname, '.env') });\n\n/**\n * See https://playwright.dev/docs/test-configuration.\n */\nexport default defineConfig({\n  testDir: './tests',\n  /* Run tests in files in parallel */\n  fullyParallel: true,\n  /* Fail the build on CI if you accidentally left test.only in the source code. */\n  forbidOnly: !!process.env.CI,\n  /* Retry on CI only */\n  retries: 0,\n  /* Opt out of parallel tests on CI. */\n  workers: process.env.CI ? 1 : undefined,\n  /* Reporter to use. See https://playwright.dev/docs/test-reporters */\n  reporter: 'html',\n  globalSetup: 'global.setup.ts',\n  /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */\n  use: {\n    /* Base URL to use in actions like `await page.goto('/')`. */\n    baseURL: 'http://127.0.0.1:3000',\n\n    /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */\n    trace: 'on',\n    screenshot: 'off',\n    video: 'on',\n  },\n\n  /* Configure projects for major browsers */\n  projects: [\n    {\n      name: 'bootstrap',\n      use: { ...devices['Desktop Chrome'] },\n      testMatch: 'tests/bootstrap.ts',\n    },\n    {\n      name: 'chromium',\n      use: { ...devices['Desktop Chrome'] },\n      dependencies: ['bootstrap'],\n    },\n\n    // {\n    //   name: 'firefox',\n    //   use: { ...devices['Desktop Firefox'] },\n    // },\n    //\n    // {\n    //   name: 'webkit',\n    //   use: { ...devices['Desktop Safari'] },\n    // },\n\n    /* Test against mobile viewports. 
*/\n    // {\n    //   name: 'Mobile Chrome',\n    //   use: { ...devices['Pixel 5'] },\n    // },\n    // {\n    //   name: 'Mobile Safari',\n    //   use: { ...devices['iPhone 12'] },\n    // },\n\n    /* Test against branded browsers. */\n    // {\n    //   name: 'Microsoft Edge',\n    //   use: { ...devices['Desktop Edge'], channel: 'msedge' },\n    // },\n    // {\n    //   name: 'Google Chrome',\n    //   use: { ...devices['Desktop Chrome'], channel: 'chrome' },\n    // },\n  ],\n\n  /* Run your local dev server before starting the tests */\n  // webServer: {\n  //   command: 'npm run start',\n  //   url: 'http://127.0.0.1:3000',\n  //   reuseExistingServer: !process.env.CI,\n  // },\n});\n"
  },
  {
    "path": "e2e/prepare-test.sh",
    "content": "#!/bin/bash\n\nset -e\n\nTIDB_VERSION=v8.5.0\n\n# Download tiup\nfunction prepare_tidb () {\n  curl --proto '=https' --tlsv1.2 -sSf https://tiup-mirrors.pingcap.com/install.sh | sh\n  PATH=${PATH}:/home/runner/.tiup/bin\n  tiup install playground tidb:${TIDB_VERSION} pd:${TIDB_VERSION} tikv:${TIDB_VERSION} tiflash:${TIDB_VERSION}\n}\n\n# Pull docker images\nfunction prepare_docker_images() {\n  docker compose pull frontend background backend redis static-web-server\n}\n\nprepare_tidb &\nPID1=$!\n\nprepare_docker_images &\nPID2=$!\n\nwait $PID1\nwait $PID2\n"
  },
  {
    "path": "e2e/res/sample-evaluation-dataset.csv",
    "content": "\"id\",\"query\",\"reference\",\"topic_type\"\n\"42\",\"Example Query\",\"Example Reference\",\"Basic Knowledge\"\n"
  },
  {
    "path": "e2e/start-test.sh",
    "content": "#!/bin/bash\n\nset -e\n\n# Formats\nNO_FORMAT=\"\\033[0m\"\nF_BOLD=\"\\033[1m\"\nF_UNDERLINED=\"\\033[4m\"\nC_AQUA=\"\\033[38;5;14m\"\nTAG=\"${F_BOLD}${F_UNDERLINED}${C_AQUA}[TiDB.AI Integration Test]${NO_FORMAT}\"\n\nTIDB_VERSION=v8.5.0\n\nPATH=${PATH}:/home/runner/.tiup/bin\n\necho -e \"$TAG Creating temp dir\"\nexport E2E_DATA_STORAGE_DIR=$(mktemp -d \"${TMPDIR:-/tmp/}\"/tidbai-storage.XXXXXXXX | sed 's#//#/#g')\nexport E2E_DATA_REDIS_DIR=$(mktemp -d \"${TMPDIR:-/tmp/}\"/tidbai-redis.XXXXXXXX | sed 's#//#/#g')\necho E2E_DOCKER_TAG_FRONTEND: ${E2E_DOCKER_TAG_FRONTEND}\necho E2E_DOCKER_TAG_BACKEND: ${E2E_DOCKER_TAG_BACKEND}\necho E2E_DATA_STORAGE_DIR: ${E2E_DATA_STORAGE_DIR}\necho E2E_DATA_REDIS_DIR: ${E2E_DATA_REDIS_DIR}\n\necho -e \"$TAG Starting TiDB\"\n\nCLUSTER_TAG=e2e_$(xxd -l4 -ps /dev/urandom)\n\nTIDB_PID=\n\n# Cleanups\nfunction clean_up {\n  ARG=$?\n  echo -e \"$TAG Cleaning up...\"\n\n  # Stop dockers\n  echo -e \"$TAG Shutdown dockers...\"\n  docker compose down frontend background backend redis static-web-server\n\n  # Stop tiup playground and cleanup data\n  echo -e \"$TAG Stopping tiup playground cluster...\"\n  echo -e \"$TAG Wait until TiDB down...\"\n  kill $TIDB_PID 2>/dev/null || true\n  while ! tiup playground display ${CLUSTER_TAG} >/dev/null 2>/dev/null\n  do\n    sleep 1\n  done\n  echo -e \"$TAG Cleaning tiup playground data...\"\n  tiup clean ${CLUSTER_TAG}\n\n  # Remove temp dirs\n  echo -e \"$TAG Cleaning temp data dirs\"\n  rm -rf ${E2E_DATA_STORAGE_DIR} ${E2E_DATA_REDIS_DIR} 2>/dev/null || true\n\n  exit $ARG\n}\n\ntrap clean_up EXIT\n\necho -e \"$TAG Create tiup playground cluster...\"\ntiup playground ${TIDB_VERSION} --tag ${CLUSTER_TAG} --without-monitor \\\n  --db 1 --pd 1 --tiflash 1 --kv 1 \\\n  --db.host 0.0.0.0 --pd.host 0.0.0.0 &\nTIDB_PID=$!\necho -e \"$TAG Wait until TiDB ready...\"\nwhile ! 
tiup playground display ${CLUSTER_TAG} >/dev/null 2>/dev/null\ndo\n  sleep 1\ndone\n\necho -e \"$TAG Execute migrations\"\ndocker compose run --rm backend /bin/sh -c \"alembic upgrade head\"\n\necho -e \"$TAG Execute bootstrap\"\ndocker compose run --rm backend /bin/sh -c \"python bootstrap.py\" > bootstrap.stdout\n\necho -e \"$TAG Extract initial username and password\"\ncat bootstrap.stdout | grep IMPORTANT | sed 's/^.*email: \\(.*\\) and password: \\(.*\\)$/USERNAME=\\1\\nPASSWORD=\\2/' > .credentials\ncat .credentials\n\necho -e \"$TAG Start components\"\ndocker compose up -d redis frontend backend background static-web-server\n\necho -e \"$TAG Wait until tidb.ai frontend ready...\"\nwhile ! curl http://127.0.0.1:3000 > /dev/null 2>/dev/null\ndo\n  sleep 1\ndone\n\necho -e \"$TAG Wait until tidb.ai backend ready...\"\nwhile ! curl http://127.0.0.1:5001 > /dev/null 2>/dev/null\ndo\n  sleep 1\ndone\n\nnpx playwright test ${PLAYWRIGHT_ARGS}\n\nif [ ! \"${CI}\" ]; then\n  npx playwright show-report\nfi\n"
  },
  {
    "path": "e2e/test-html/example-doc-1.html",
    "content": "<html lang=\"en\">\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n  <title>Example Document 1</title>\n</head>\n<body>\n<h1>This is an example document</h1>\n<p>Good job, you found this document!</p>\n</body>\n</html>"
  },
  {
    "path": "e2e/test-html/example-doc-2.html",
    "content": "<html lang=\"en\">\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n  <title>Example Document 2</title>\n</head>\n<body>\n<h1>This is an example document</h1>\n<p>Good job, you found this document!</p>\n</body>\n</html>"
  },
  {
    "path": "e2e/test-html/example-sitemap.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">\n    <url>\n        <loc>http://static-web-server/example-doc-1.html</loc>\n        <changefreq>daily</changefreq>\n        <priority>0.9</priority>\n    </url>\n    <url>\n        <loc>http://static-web-server/example-doc-2.html</loc>\n        <changefreq>daily</changefreq>\n        <priority>0.9</priority>\n    </url>\n</urlset>\n"
  },
  {
    "path": "e2e/test-html/widget-controlled.html",
    "content": "<!doctype html>\n<html lang=\"en\">\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n  <title>Document</title>\n</head>\n<body>\n<script async src=\"http://127.0.0.1:3000/widget.js\" data-api-base=\"http://127.0.0.1:3000\" data-controlled=\"true\"></script>\n</body>\n</html>"
  },
  {
    "path": "e2e/test-html/widget.html",
    "content": "<!doctype html>\n<html lang=\"en\">\n<head>\n  <meta charset=\"UTF-8\">\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n  <title>Document</title>\n</head>\n<body>\n<script async src=\"http://127.0.0.1:3000/widget.js\"></script>\n</body>\n</html>"
  },
  {
    "path": "e2e/tests/api-keys.spec.ts",
    "content": "import { expect, test } from '@playwright/test';\nimport { loginViaApi } from '../utils/login';\n\ntest.use({\n  permissions: ['clipboard-read'],\n});\n\ntest('API Keys Operations', async ({ page, baseURL }) => {\n\n  await test.step('Login', async () => {\n    await loginViaApi(page);\n  });\n\n  await test.step('Click Nav', async () => {\n    await page.goto('/');\n    await page.getByText('API Keys').click();\n    await page.waitForURL('/api-keys');\n  });\n\n  const apiKey = await test.step('Create API Key', async () => {\n    await page.getByText('Create', { exact: true }).click();\n\n    const dialog = page.getByRole('dialog', { name: 'Create API Key' });\n    await dialog.waitFor({ state: 'visible' });\n\n    await page.fill('input[name=description]', 'Test API Key');\n    await page.getByRole('button', { name: 'Create API Key', exact: true }).click();\n\n    await dialog.waitFor({ state: 'hidden' });\n\n    const apiKey = await page.evaluate('navigator.clipboard.readText()');\n    expect(apiKey).not.toBeFalsy();\n\n    return apiKey;\n  });\n\n  await test.step(`Try fetch with API Key ${apiKey}`, async () => {\n    const response = await fetch(`${baseURL}/api/v1/api-keys`, {\n      headers: {\n        Authorization: `Bearer ${apiKey}`,\n      },\n    });\n\n    if (!response.ok) {\n      console.error(`${response.status} ${response.statusText}`, await response.text());\n    }\n\n    expect(response.ok).toBe(true);\n  });\n\n  await test.step('Delete API Key', async () => {\n    const row = page.getByText('Test API Key').locator('..');\n    await row.getByRole('button', { name: 'Delete' }).click();\n\n    const dialog = page.getByRole('alertdialog', { name: 'Are you absolutely sure?' 
});\n    await dialog.waitFor({ state: 'visible' });\n\n    await dialog.getByRole('button', { name: 'Continue' }).click();\n    await dialog.getByRole('button', { name: 'Continue' }).waitFor({ state: 'detached' });\n    await dialog.waitFor({ state: 'hidden' });\n\n    await row.waitFor({ state: 'detached' });\n  });\n\n  await test.step(`Try fetch with API Key (expect 401 Unauthorized)`, async () => {\n    const response = await fetch(`${baseURL}/api/v1/api-keys`, {\n      headers: {\n        Authorization: `Bearer ${apiKey}`,\n      },\n    });\n    expect(response.status).toBe(401);\n  });\n});\n"
  },
  {
    "path": "e2e/tests/api.spec.ts",
    "content": "import { expect, test } from '@playwright/test';\n\nimport type { APIResponse } from 'playwright-core';\nimport { loginViaApi } from '../utils/login';\n\n// let key: string = '';\n//\n// test.beforeAll(async ({ request }) => {\n//   const response = await request.post('/api/v1/api-leys', {\n//     data: JSON.stringify({\n//       description: 'E2E API Tests',\n//     }),\n//   });\n//\n//   const { api_key } = await response.json();\n//   key = api_key;\n//\n//   console.log(`Created API Key, ${key}`);\n// });\n\ntest.describe('API', () => {\n  test('Bootstrap Status API', async ({ request }) => {\n    await expectOk(request.get('/api/v1/system/bootstrap-status'));\n  });\n\n  test('Retrieve Entity or Entites', async ({ request }) => {\n    async function expectGetOkStep (url: string) {\n      await test.step(url, async () => {\n        const response = await request.get(url);\n\n        if (!response.ok()) {\n          console.error(`${response.status()} ${response.statusText()}`, await response.text());\n        }\n\n        await expectOk(response);\n      });\n    }\n\n    await loginViaApi({ request });\n\n    //\n    // =====\n    // User\n\n    await expectGetOkStep('/api/v1/chats');\n\n    await expectGetOkStep('/api/v1/site-config');\n\n    await expectGetOkStep('/api/v1/api-keys');\n\n    //\n    // =====\n    // Admin\n\n    await expectGetOkStep('/api/v1/admin/chat-engines');\n\n    // Deprecated?\n    await expectGetOkStep('/api/v1/admin/documents');\n    await expectGetOkStep('/api/v1/admin/documents?page=1&size=10&language=en');\n    await expectGetOkStep('/api/v1/admin/documents?page=1&size=10&source_uri=system-variables&created_at_start=2024-07-04T11:58:00&created_at_end=2024-07-04T12:58:00&updated_at_start=2024-08-22T14:36:59&updated_at_end=2024-08-23T14:37:59&last_modified_at_start=2024-07-04T11:57:58&last_modified_at_end=2024-07-05T11:57:59&name=System&mime_type=text/markdown&index_status=failed');\n\n    await 
expectGetOkStep('/api/v1/admin/knowledge_bases');\n\n    await expectGetOkStep('/api/v1/admin/knowledge_bases/1/documents');\n    await expectGetOkStep('/api/v1/admin/knowledge_bases/1/documents?page=1&size=10&language=en');\n    await expectGetOkStep('/api/v1/admin/knowledge_bases/1/documents?page=1&size=10&source_uri=system-variables&created_at_start=2024-07-04T11:58:00&created_at_end=2024-07-04T12:58:00&updated_at_start=2024-08-22T14:36:59&updated_at_end=2024-08-23T14:37:59&last_modified_at_start=2024-07-04T11:57:58&last_modified_at_end=2024-07-05T11:57:59&name=System&mime_type=text/markdown&index_status=failed');\n\n    await expectGetOkStep('/api/v1/admin/site-settings');\n\n    await expectGetOkStep('/api/v1/admin/feedbacks');\n\n    await expectGetOkStep('/api/v1/admin/llms');\n    await expectGetOkStep('/api/v1/admin/llms/providers/options');\n\n    await expectGetOkStep('/api/v1/admin/embedding-models');\n    await expectGetOkStep('/api/v1/admin/embedding-models/providers/options');\n\n    await expectGetOkStep('/api/v1/admin/reranker-models');\n    await expectGetOkStep('/api/v1/admin/reranker-models/providers/options');\n\n    await expectGetOkStep('/api/v1/admin/retrieve/documents?chat_engine=1&question=what%20is%20tidb&chat_engine=default&top_k=5');\n    await expectGetOkStep('/api/v1/admin/embedding_retrieve?chat_engine=1&question=what%20is%20tidb&chat_engine=default&top_k=5');\n\n    await expectGetOkStep('/api/v1/admin/evaluation/datasets');\n    await expectGetOkStep('/api/v1/admin/evaluation/tasks');\n  });\n});\n\nasync function expectOk (response: APIResponse | Promise<APIResponse>) {\n  expect((await response).ok()).toBe(true);\n}\n"
  },
  {
    "path": "e2e/tests/bootstrap.ts",
    "content": "import { expect, test } from '@playwright/test';\n\ntest.use({\n  trace: !!process.env.CI ? 'off' : 'on',\n});\n\ntest('Bootstrap', async ({ browser, page }) => {\n  test.slow();\n\n  const {\n    USERNAME,\n    PASSWORD,\n    E2E_LLM_PROVIDER,\n    E2E_LLM_MODEL,\n    E2E_LLM_CREDENTIALS,\n    E2E_EMBEDDING_PROVIDER,\n    E2E_EMBEDDING_MODEL,\n    E2E_EMBEDDING_CREDENTIALS,\n    E2E_RERANKER_PROVIDER,\n    E2E_RERANKER_MODEL,\n    E2E_RERANKER_CREDENTIALS,\n  } = process.env;\n\n  await test.step('Visit home page', async () => {\n    await page.goto('/');\n\n    // IMPORTANT: Prevent recording credentials\n    await page.addStyleTag({\n      content: `[name=credentials] { filter: blur(1.5rem); }`,\n    });\n    await expect(page).toHaveTitle('TiDB.AI');\n    await expect(page.getByText('Ask anything about TiDB')).toBeVisible();\n  });\n\n  const hasWizardAlert = await page.getByText('This site is not ready to use yet.').isVisible();\n\n  if (!hasWizardAlert) {\n    return;\n  }\n\n  await test.step('Login', async () => {\n    if (await page.getByRole('link', { name: 'Login', exact: true }).count() === 0) {\n      console.warn('Already logged in');\n      return;\n    }\n    await page.getByRole('link', { name: 'Login', exact: true }).click();\n\n    const usernameInput = await page.waitForSelector('[name=username]');\n    const passwordInput = await page.waitForSelector('[name=password]');\n    const loginButton = page.getByRole('button', { name: 'Login', exact: true });\n\n    // Fill in credentials\n    await usernameInput.fill(USERNAME);\n    await passwordInput.fill(PASSWORD);\n\n    // Click login\n    await loginButton.click();\n\n    // Wait for dialog dismiss\n    await page.getByRole('dialog', { name: 'Sign In' }).waitFor({ state: 'detached' });\n\n    // Wait login\n    await page.getByText(USERNAME).waitFor({ state: 'visible' });\n  });\n\n  await test.step('Open admin side menu', async () => {\n    const modelTab = 
page.getByText('Models', { exact: true }).and(page.locator('[data-sidebar=\"menu-button\"]'));\n    if ((await modelTab.getAttribute('data-state')) !== 'open') {\n      await modelTab.click();\n    }\n  });\n\n  async function clickTab (text: string, url: string) {\n    await test.step(`Goto ${text} page`, async () => {\n      await page.getByText(text, { exact: true }).and(page.locator('[data-sidebar=\"menu-sub-button\"]').or(page.locator('[data-sidebar=\"menu-button\"]'))).click();\n      await page.waitForURL(url);\n      await page.getByText(`New ${text.replace(/s$/, '')}`).waitFor({ state: 'visible' });\n    });\n  }\n\n  // Setup reranker\n  await test.step(`Create Default Reranker (${E2E_RERANKER_PROVIDER} ${E2E_RERANKER_MODEL})`, async () => {\n    await clickTab('Reranker Models', '/reranker-models');\n\n    await page.getByText('Loading Data').waitFor({ state: 'detached' });\n    if (await page.getByText('My Reranker').count() === 0) {\n      await page.getByText('New Reranker Model').click();\n\n      // Fill name\n      const nameInput = await page.waitForSelector('[name=name]');\n      await nameInput.fill('My Reranker');\n\n      // Select provider\n      await page.getByLabel('Provider').locator('..').locator('button').click();\n      await page.getByRole('option').filter({\n        has: page.getByText(E2E_RERANKER_PROVIDER, { exact: true }),\n      }).click();\n\n      // Fill model if provided\n      if (E2E_RERANKER_MODEL) {\n        const modelInput = await page.waitForSelector('[name=model]');\n        await modelInput.fill(E2E_RERANKER_MODEL);\n      }\n\n      // Fill credentials\n      if (E2E_RERANKER_CREDENTIALS) {\n        const credentialsInput = await page.waitForSelector('[name=credentials]');\n        await credentialsInput.fill(E2E_RERANKER_CREDENTIALS);\n      }\n\n      // Click create button\n      const createButton = page.getByRole('button', { name: 'Create Reranker' });\n      await createButton.scrollIntoViewIfNeeded();\n      
await createButton.click();\n\n      // Wait for finish by check the url changes\n      await page.waitForURL(/\\/reranker-models\\/\\d+/);\n    }\n  });\n\n  await test.step(`Create Default LLM (${E2E_LLM_PROVIDER} ${E2E_LLM_MODEL})`, async () => {\n    await clickTab('LLMs', '/llms');\n\n    await page.getByText('Loading Data').waitFor({ state: 'detached' });\n    if (await page.getByText('My LLM').count() === 0) {\n      await page.getByText('New LLM').click();\n\n      // Fill name\n      const nameInput = await page.waitForSelector('[name=name]');\n      await nameInput.fill('My LLM');\n\n      // Select provider\n      await page.getByLabel('Provider').locator('..').locator('button').click();\n      await page.getByRole('option').filter({\n        has: page.getByText(E2E_LLM_PROVIDER, { exact: true }),\n      }).click();\n\n      // Fill model if provided\n      if (E2E_LLM_MODEL) {\n        const modelInput = await page.waitForSelector('[name=model]');\n        await modelInput.fill(E2E_LLM_MODEL);\n      }\n\n      // Fill credentials\n      const credentialsInput = await page.waitForSelector('[name=credentials]');\n      await credentialsInput.fill(E2E_LLM_CREDENTIALS);\n\n      // Click create button\n      const createButton = page.getByRole('button', { name: 'Create LLM' });\n      await createButton.scrollIntoViewIfNeeded();\n      await createButton.click();\n\n      // Wait for finish by check the url changes\n      await page.waitForURL(/\\/llms\\/\\d+/);\n    }\n  });\n\n  await test.step(`Create Default Embedding model (${E2E_EMBEDDING_PROVIDER} ${E2E_EMBEDDING_MODEL || 'default'})`, async () => {\n    await clickTab('Embedding Models', '/embedding-models');\n\n    await page.getByText('Loading Data').waitFor({ state: 'detached' });\n    if (await page.getByText('My Embedding Model').count() === 0) {\n      await page.getByText('New Embedding Model').click();\n\n      // Fill name\n      const nameInput = await 
page.waitForSelector('[name=name]');\n      await nameInput.fill('My Embedding Model');\n\n      // Select provider\n      await page.getByLabel('Provider').locator('..').locator('button').click();\n      await page.getByRole('option').filter({\n        has: page.getByText(E2E_EMBEDDING_PROVIDER, { exact: true }),\n      }).click();\n\n      // Fill model if provided\n      if (E2E_EMBEDDING_MODEL) {\n        const modelInput = await page.waitForSelector('[name=model]');\n        await modelInput.fill(E2E_EMBEDDING_MODEL);\n      }\n\n      // Fill credentials\n      const credentialsInput = await page.waitForSelector('[name=credentials]');\n      await credentialsInput.fill(E2E_EMBEDDING_CREDENTIALS);\n\n      const vectorDimensionInput = await page.waitForSelector('[name=vector_dimension]');\n      await vectorDimensionInput.fill('1536');\n\n      // Click create button\n      const createButton = page.getByRole('button', { name: 'Create Embedding Model' });\n      await createButton.scrollIntoViewIfNeeded();\n      await createButton.click();\n\n      // Wait for finish by check the url changes\n      await page.waitForURL(/\\/embedding-models\\/\\d+/);\n    }\n  });\n\n  // Create Knowledge Base\n  await test.step('Create Knowledge Base', async () => {\n    await clickTab('Knowledge Bases', '/knowledge-bases');\n\n    await page.getByText('Loading Data').waitFor({ state: 'detached' });\n    if (await page.getByText('My Knowledge Base').count() === 0) {\n      await page.getByText('New Knowledge Base').click();\n      await page.waitForSelector('[name=name]');\n      await page.fill('input[name=name]', 'My Knowledge Base');\n      await page.fill('textarea[name=description]', 'This is E2E Knowledge Base.');\n      await page.getByRole('button', { name: 'Create', exact: true }).click();\n\n      await page.waitForURL(/\\/knowledge-bases\\/1\\/data-sources/);\n    }\n\n    // Create Datasource\n    await test.step('Create Datasource', async () => {\n      await 
page.goto('/knowledge-bases/1/data-sources');\n\n      if (await page.getByText('sample.pdf').count() === 0) {\n        await page.getByRole('button', { name: 'Files' }).click();\n\n        const nameInput = await page.waitForSelector('[name=name]');\n        await nameInput.fill('sample.pdf');\n\n        await page.setInputFiles('[name=files]', 'res/sample.pdf');\n\n        const createButton = page.getByRole('button', { name: 'Create' });\n        await createButton.scrollIntoViewIfNeeded();\n\n        await createButton.click();\n\n        // Jump back to KB data source page\n        await page.waitForURL(/\\/knowledge-bases\\/1\\/data-sources$/);\n      }\n    });\n  });\n\n  // Update default Chat Engine\n  await test.step('Update Chat Engine', async () => {\n    await clickTab('Chat Engines', '/chat-engines');\n    await page.getByText('Loading Data').waitFor({ state: 'detached' });\n    await page.getByRole('link', { name: 'default' }).click();\n\n    await page.getByRole('tab', { name: 'Retrieval' }).click();\n    await page.getByRole('button', { name: 'Knowledge Bases', exact: true }).click();\n    await page.getByRole('option').filter({ has: page.getByText('My Knowledge Base') }).click();\n    await page.click('body');\n\n    await page.getByRole('button', { name: 'Save', exact: true }).click();\n    await page.getByRole('button', { name: 'Save', exact: true }).waitFor({ state: 'detached' });\n  });\n\n  await test.step('Reload and check wizard alert', async () => {\n    await page.goto('/');\n    await page.getByText('This site is not ready to use yet.').waitFor({ state: 'detached' });\n  });\n\n  await test.step('Documents count greater than 0', async () => {\n    await page.goto('/knowledge-bases/1');\n    await page.getByRole('button', { name: 'sample.pdf' }).waitFor({ state: 'visible' });\n  });\n\n  await test.step('Wait for indexing', async () => {\n    while (true) {\n      const response = await 
page.request.get('/api/v1/admin/knowledge_bases/1/overview');\n      if (!response.ok()) {\n        console.warn(`${response.status()} ${response.statusText()}`, await response.text());\n      } else {\n        const json = await response.json();\n        if (json.vector_index.completed > 0) {\n          break;\n        }\n      }\n      await page.waitForTimeout(500);\n    }\n  });\n});\n"
  },
  {
    "path": "e2e/tests/chat-engine.spec.ts",
    "content": "import { expect, type Locator, type Page, test } from '@playwright/test';\nimport { checkCheckbox, selectOption, turnSwitch } from '../utils/forms';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe('Chat Engine', () => {\n  test.describe('Configurations', () => {\n    test('Create with default configuration', async ({ page }) => {\n      await test.step('Goto page', async () => {\n        await loginViaApi(page);\n        await page.goto('/chat-engines');\n        await page.getByRole('button', { name: 'New Chat Engine' }).click();\n        await page.waitForURL('/chat-engines/new');\n      });\n\n      const name = 'All default configuration';\n\n      await test.step('Fill in fields', async () => {\n        // Fill in name\n        await page.getByRole('textbox', { name: 'Name' }).fill(name);\n\n        // Goto retrieval tab\n        await page.getByRole('tab', { name: 'Retrieval' }).click();\n\n        // Select default knowledge base\n        await selectOption(page, 'Knowledge Bases', /My Knowledge Base/, true);\n      });\n\n      const chatEngineId = await test.step('Create', async () => {\n        await page.getByRole('button', { name: 'Create Chat Engine' }).click();\n        await page.waitForURL(/\\/chat-engines\\/\\d+$/);\n\n        const [_, idString] = /\\/chat-engines\\/(\\d+)$/.exec(page.url());\n        return parseInt(idString);\n      });\n\n      await test.step('Validate configurations', async () => {\n        // Validate chat engine configurations\n        const chatEngine = await getChatEngine(page, chatEngineId);\n        expect(chatEngine.name).toBe(name);\n        expect(chatEngine.engine_options).toStrictEqual({\n          knowledge_base: {\n            linked_knowledge_bases: [{\n              id: 1,\n            }],\n          },\n          knowledge_graph: {\n            enabled: true,\n          },\n          hide_sources: false,\n          clarify_question: false,\n          further_questions: false,\n  
      });\n        expect(chatEngine.llm_id).toBeNull();\n        expect(chatEngine.fast_llm_id).toBeNull();\n        expect(chatEngine.reranker_id).toBeNull();\n      });\n\n      await test.step('Check availability', async () => {\n        await checkChatEngineAvailability(page, name);\n      });\n    });\n\n    test('Create with featured configuration', async ({ page }) => {\n      await test.step('Goto page', async () => {\n        await loginViaApi(page);\n        await page.goto('/chat-engines');\n        await page.getByRole('button', { name: 'New Chat Engine' }).click();\n        await page.waitForURL('/chat-engines/new');\n      });\n\n      const name = 'Featured configuration';\n\n      await test.step('Fill in fields', async () => {\n        // Fill in name\n        await page.getByRole('textbox', { name: 'Name' }).fill(name);\n\n        // Set LLM & Fast LLM\n        await selectOption(page, 'LLM', /My LLM/);\n        await selectOption(page, 'Fast LLM', /My LLM/);\n        // TODO: Create a Fast LLM in place\n\n        // Goto retrieval tab\n        await page.getByRole('tab', { name: 'Retrieval' }).click();\n        await selectOption(page, 'Knowledge Bases', /My Knowledge Base/, true);\n        await checkCheckbox(page, 'Hide Sources');\n\n        // Semantic Search Subsection\n        await selectOption(page, 'Reranker', /My Reranker/);\n\n        // Knowledge Graph Subsection\n        await page.getByRole('spinbutton', { name: 'Depth' }).fill('1'); // Do not use 2 for default value is 2\n        await checkCheckbox(page, 'Include Metadata');\n        await checkCheckbox(page, 'Using Intent Search');\n\n        // Goto Generation tab\n        await page.getByRole('tab', { name: 'Generation' }).click();\n\n        await turnSwitch(page, 'Clarify Question');\n      });\n\n      const chatEngineId = await test.step('Create', async () => {\n        await page.getByRole('button', { name: 'Create Chat Engine' }).click();\n        await 
page.waitForURL(/\\/chat-engines\\/\\d+$/);\n\n        const [_, idString] = /\\/chat-engines\\/(\\d+)$/.exec(page.url());\n        return parseInt(idString);\n      });\n\n      await test.step('Validate configurations', async () => {\n        // Validate chat engine configurations\n        const chatEngine = await getChatEngine(page, chatEngineId);\n        expect(chatEngine.name).toBe(name);\n        expect(chatEngine.engine_options).toStrictEqual({\n          knowledge_base: {\n            linked_knowledge_bases: [{\n              id: 1,\n            }],\n          },\n          knowledge_graph: {\n            enabled: true,\n            depth: 1,\n            include_meta: true,\n            using_intent_search: true,\n          },\n          hide_sources: true,\n          clarify_question: true,\n          further_questions: false,\n        });\n        expect(chatEngine.llm_id).toBe(1);\n        expect(chatEngine.fast_llm_id).toBe(1);\n        expect(chatEngine.reranker_id).toBe(1);\n      });\n\n      await test.step('Check availability', async () => {\n        await checkChatEngineAvailability(page, name);\n      });\n    });\n\n    test('Update', async ({ page }) => {\n      await test.step('Goto page', async () => {\n        await loginViaApi(page);\n        await page.goto('/chat-engines');\n        await page.getByRole('button', { name: 'New Chat Engine' }).click();\n        await page.waitForURL('/chat-engines/new');\n\n        const name = 'Chat Engine to be updated';\n\n        await test.step('Fill in fields', async () => {\n          // Fill in name\n          await page.getByRole('textbox', { name: 'Name' }).fill(name);\n\n          // Goto retrieval tab\n          await page.getByRole('tab', { name: 'Retrieval' }).click();\n\n          // Select default knowledge base\n          await selectOption(page, 'Knowledge Bases', /My Knowledge Base/, true);\n        });\n\n        const chatEngineId = await test.step('Create', async () => {\n          
await page.getByRole('button', { name: 'Create Chat Engine' }).click();\n          await page.waitForURL(/\\/chat-engines\\/\\d+$/);\n\n          const [_, idString] = /\\/chat-engines\\/(\\d+)$/.exec(page.url());\n          return parseInt(idString);\n        });\n\n        await page.goto('/chat-engines/' + chatEngineId);\n\n        await test.step('Update Name', async () => {\n          await page.getByRole('textbox', { name: 'Name' }).fill('Chat Engine to be updated (updated)');\n          await waitUpdate(page, page.getByRole('textbox', { name: 'Name', disabled: false }));\n\n          expect(await getChatEngine(page, chatEngineId).then(ce => ce.name)).toBe('Chat Engine to be updated (updated)');\n        });\n\n        await test.step('Update LLM', async () => {\n          await selectOption(page, 'LLM', /My LLM/);\n          await waitUpdate(page, page.getByRole('button', { name: 'LLM', exact: true, disabled: false }));\n\n          expect(await getChatEngine(page, chatEngineId).then(ce => ce.llm_id)).toBe(1);\n        });\n\n        await page.getByRole('tab', { name: 'Retrieval' }).click();\n        await test.step('Update KG Depth', async () => {\n          await page.getByRole('spinbutton', { name: 'Depth' }).fill('3');\n          await waitUpdate(page, page.getByRole('spinbutton', { name: 'Depth', disabled: false }));\n\n          expect(await getChatEngine(page, chatEngineId).then(ce => ce.engine_options.knowledge_graph.depth)).toBe(3);\n        });\n\n        // TODO: add cases for rest fields\n      });\n    });\n  });\n});\n\n// TODO: The selectors are tricky. 
Update the select component to simplify the validation.\nasync function checkChatEngineAvailability (page: Page, name: string) {\n  await page.locator('[data-sidebar=\"menu\"] li').filter({ hasText: /Chat Engines/ }).getByRole('link').click();\n  // wait for chat engine table updated.\n  await page.getByText(name).waitFor();\n\n  await page.goto('/');\n\n  // Select the 'Select Chat Engine' combobox\n  const selector = page.getByRole('combobox').and(page.getByText('Select Chat Engine', { exact: true }).locator('..'));\n  await selector.click();\n  await page.getByRole('option', { name: name }).click();\n\n  // Input question\n  await page.getByPlaceholder('Input your question here...').fill('Hello');\n\n  // Send message\n  await page.keyboard.press('ControlOrMeta+Enter');\n\n  // Wait page url to be changed. When changed, the chat was created correctly.\n  // Ignore the returned message which is not important.\n  await page.waitForURL(/\\/c\\/.+$/);\n}\n\nasync function getChatEngine (page: Page, id: number) {\n  const ceResponse = await page.request.get(`/api/v1/admin/chat-engines/${id}`);\n  expect(ceResponse.ok()).toBe(true);\n  return await ceResponse.json();\n}\n\nasync function waitUpdate (page: Page, locator: Locator) {\n  await page.getByRole('button', { name: 'Save' }).click();\n  await page.getByRole('button', { name: 'Save' }).waitFor({ state: 'detached' });\n  await locator.waitFor();\n}"
  },
  {
    "path": "e2e/tests/chat.spec.ts",
    "content": "import { expect, test } from '@playwright/test';\nimport { getChatRequestPromise, QUESTION, testNewChat } from '../utils/chat';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe.serial('Chat', () => {\n  test('From Home Page', async ({ page, baseURL }) => {\n    await test.step('Visit home page', async () => {\n      await page.goto('/');\n    });\n\n    const chatRequest = await test.step('Input text and ask', async () => {\n      await page.getByPlaceholder('Input your question here...').fill(QUESTION);\n\n      // https://playwright.dev/docs/events#waiting-for-event\n      const chatRequestPromise = getChatRequestPromise(page, baseURL);\n      const trigger = page.locator('button', { has: page.locator('svg.lucide-arrow-up') });\n      await trigger.click();\n\n      await expect(trigger).toBeDisabled();\n      return await chatRequestPromise;\n    });\n\n    await testNewChat(page, chatRequest, true, true);\n  });\n\n  test('From Keyboard Shortcut', async ({ page, baseURL }) => {\n    await test.step('Visit home page', async () => {\n      await page.goto('/');\n    });\n\n    const chatRequest = await test.step('Input text and ask', async () => {\n      await page.keyboard.press('ControlOrMeta+k');\n      await page.keyboard.insertText(QUESTION);\n\n      // https://playwright.dev/docs/events#waiting-for-event\n      const chatRequestPromise = getChatRequestPromise(page, baseURL);\n      await page.keyboard.press('ControlOrMeta+Enter');\n      return await chatRequestPromise;\n    });\n\n    await testNewChat(page, chatRequest, true, false);\n  });\n\n  test('Admin Feedback Page', async ({ page }) => {\n    await loginViaApi(page);\n    await page.goto('/feedbacks');\n    expect(await page.getByText('Good Good Good').count()).toBeGreaterThan(0);\n    expect(await page.getByText('Bad Bad Bad').count()).toBeGreaterThan(0);\n  });\n});\n"
  },
  {
    "path": "e2e/tests/datasource.spec.ts",
    "content": "import { expect, test } from '@playwright/test';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe('Datasource', () => {\n  test.fixme('Web Single Page', async ({ page }) => {\n    test.slow();\n\n    await test.step('Login and visit KB page', async () => {\n      await loginViaApi(page);\n      await page.goto('/knowledge-bases/1/data-sources');\n    });\n\n    await test.step('Add Single Page Datasource', async () => {\n      await page.getByRole('button', { name: 'Web Pages' }).click();\n      await page.waitForURL('/knowledge-bases/1/data-sources/new?type=web_single_page');\n\n      await page.getByLabel('Name').fill('example site');\n\n      await page.getByRole('button', { name: 'New Item' }).click();\n      await page.locator('input[name=\"urls.0\"]').fill('https://example.com');\n      await page.getByRole('button', { name: 'New Item' }).click();\n      await page.locator('input[name=\"urls.1\"]').fill('https://www.iana.org/help/example-domains');\n\n      await page.getByRole('button', { name: 'Create' }).click();\n\n      await page.waitForURL('/knowledge-bases/1/data-sources');\n\n      test.fixme('check index status', async () => {\n        const id = /\\/datasources\\/(\\d+)/.exec(page.url())[1];\n        while (true) {\n          const response = await page.request.get(`/api/v1/admin/datasources/${id}/overview`);\n          if (response.ok()) {\n            const json = await response.json();\n            if (json.vector_index.completed === 2) {\n              break;\n            }\n          } else {\n            console.warn(`${response.status()} ${response.statusText()}`, await response.text());\n          }\n          await page.waitForTimeout(500);\n        }\n      });\n    });\n\n    await test.step('Check Documents Page', async () => {\n      await page.goto('/documents');\n      await expect(page.getByRole('link', { name: 'https://example.com' })).toBeVisible();\n      await expect(page.getByRole('link', { name: 
'https://www.iana.org/help/example-domains' })).toBeVisible();\n    });\n  });\n\n  test.fixme('Web Sitemap', async ({ page }) => {\n    test.slow();\n\n    await test.step('Login and visit page', async () => {\n      await loginViaApi(page);\n\n      await page.goto('/datasources');\n      await expect(page.getByRole('heading', { name: 'Datasources' })).toBeVisible();\n    });\n\n    await test.step('Add Sitemap Datasource', async () => {\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.getByRole('tab', { name: 'Web Sitemap' }).click();\n      await page.waitForURL('/datasources/create/web-sitemap');\n\n      await page.getByLabel('Name').fill('example site from sitemap');\n      await page.getByLabel('Description').fill('This is example sitemap');\n\n      await page.locator('input[name=\"url\"]').fill('http://static-web-server/example-sitemap.xml');\n\n      await page.getByRole('button', { name: 'Create Datasource' }).click();\n\n      await page.waitForURL(/\\/datasources\\/\\d+/);\n\n      const id = /\\/datasources\\/(\\d+)/.exec(page.url())[1];\n      while (true) {\n        const response = await page.request.get(`/api/v1/admin/datasources/${id}/overview`);\n        if (response.ok()) {\n          const json = await response.json();\n          if (json.vector_index.completed === 2) {\n            break;\n          }\n        } else {\n          console.warn(`${response.status()} ${response.statusText()}`, await response.text());\n        }\n        await page.waitForTimeout(500);\n      }\n    });\n\n    await test.step('Check Documents Page', async () => {\n      await page.goto('/documents');\n      await expect(page.getByRole('link', { name: 'http://static-web-server/example-doc-1.html' })).toBeVisible();\n      await expect(page.getByRole('link', { name: 'http://static-web-server/example-doc-2.html' })).toBeVisible();\n    });\n  });\n\n  test.fixme('Files', () => {\n    test.fixme(true, 'Already tested in 
bootstrap');\n  });\n});"
  },
  {
    "path": "e2e/tests/evaluation.spec.ts",
    "content": "import { expect, type Page, test } from '@playwright/test';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe('Evaluation Dataset Management', () => {\n  test('Create dataset with CSV', async ({ page }) => {\n    await loginViaApi(page);\n\n    await createEvaluationDataset(page, 'Example Dataset from CSV', 'res/sample-evaluation-dataset.csv');\n\n    // Data from CSV\n    await expect(page.getByText('Example Query')).toBeVisible();\n  });\n\n  test('Create dataset from scratch', async ({ page }) => {\n    await loginViaApi(page);\n    await createEvaluationDataset(page, 'Example Dataset from scratch', 'res/sample-evaluation-dataset.csv');\n\n    // Empty dataset items list.\n    await expect(page.getByText('Empty List')).toBeVisible();\n  });\n\n  test('Delete dataset', async ({ page }) => {\n    await loginViaApi(page);\n\n    await createEvaluationDataset(page, 'Example Dataset to delete');\n\n    await page.goto('/evaluation/datasets');\n\n    await expect(page.getByRole('row').filter({ hasText: 'Example Dataset to delete' })).toBeVisible();\n\n    await page.getByRole('row').filter({ hasText: 'Example Dataset to delete' }).locator('button').last().click();\n    await page.getByRole('menuitem', { name: 'Delete' }).click();\n    await page.getByRole('button', { name: 'Continue' }).click();\n    await page.getByRole('button', { name: 'Continue' }).waitFor({ state: 'detached' });\n\n    await page.getByRole('row').filter({ hasText: 'Example Dataset to delete' }).waitFor({ state: 'detached' });\n  });\n\n  test('Mutate dataset items', async ({ page }) => {\n    await loginViaApi(page);\n    const datasetId = await createEvaluationDataset(page, 'Example Dataset to update');\n\n    // Empty dataset items list.\n    await expect(page.getByText('Empty List')).toBeVisible();\n\n    await test.step('Add item', async () => {\n      await page.getByRole('button', { name: 'New Item' }).click();\n      await 
page.waitForURL(`/evaluation/datasets/${datasetId}/items/new`);\n      await page.getByRole('textbox', { name: 'Query' }).fill('Example Query');\n      await page.getByRole('textbox', { name: 'Reference' }).fill('Example Reference');\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(`/evaluation/datasets/${datasetId}`);\n\n      await page.getByRole('row').filter({ hasText: 'Example Query' }).waitFor({ state: 'attached' });\n      await page.getByRole('row').filter({ hasText: 'Example Reference' }).waitFor({ state: 'attached' });\n    });\n\n    await test.step('Delete Item', async () => {\n      await page.getByRole('row').filter({ hasText: 'Example Query' }).locator('button').last().click();\n      await page.getByRole('menuitem', { name: 'Delete' }).click();\n      await page.getByRole('button', { name: 'Continue' }).click();\n      await page.getByRole('button', { name: 'Continue' }).waitFor({ state: 'detached' });\n\n      await page.getByRole('row').filter({ hasText: 'Example Query' }).waitFor({ state: 'detached' });\n      await page.getByRole('row').filter({ hasText: 'Example Reference' }).waitFor({ state: 'detached' });\n    });\n  });\n\n});\n\nasync function createEvaluationDataset (page: Page, name: string, file?: string) {\n  await page.goto('/');\n\n  await test.step('Navigate to Create Evaluation Dataset Page', async () => {\n    await page.getByRole('button', { name: 'Evaluation' }).click();\n    await page.getByRole('link', { name: 'Datasets' }).click();\n    await page.waitForURL('/evaluation/datasets');\n    await page.getByRole('button', { name: 'New Evaluation Dataset' }).click();\n    await page.waitForURL('/evaluation/datasets/create');\n  });\n\n  return await test.step('Fill in form and submit', async () => {\n    await page.getByRole('textbox', { name: 'Name' }).fill(name);\n    if (file) {\n      await page.locator('[name=upload_file]').setInputFiles(file);\n    }\n    await 
page.getByRole('button', { name: 'Create' }).click();\n    await page.waitForURL(/\\/evaluation\\/datasets\\/\\d+/);\n\n    const [_, idString] = /\\/evaluation\\/datasets\\/(\\d+)/.exec(page.url());\n\n    return parseInt(idString);\n  });\n}\n"
  },
  {
    "path": "e2e/tests/knowledge-base.spec.ts",
    "content": "import { expect, type Page, test } from '@playwright/test';\nimport { selectOption, turnSwitch } from '../utils/forms';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe('Knowledge Base', () => {\n  test('Configure Data Sources', async ({ page }) => {\n    const kbId = await createFeaturedKnowledgeBase(page, 'KnowledgeBase 1', true);\n\n    await test.step('Configure Files Data Source', async () => {\n      await page.getByRole('button', { name: 'Upload Files' }).click();\n      await page.waitForURL(/data-sources\\/new\\?type=file/);\n\n      await page.setInputFiles('[name=files]', 'res/sample.pdf');\n\n      await page.getByRole('textbox', { name: 'Datasource Name' }).fill('Files DataSource');\n\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n      await pollKbOverviewUntill(page, kbId, overview => overview.documents.total === 1);\n    });\n\n    await test.step('Configure Web Pages Data Source', async () => {\n      await page.getByRole('button', { name: 'Select Pages' }).click();\n      await page.waitForURL(/data-sources\\/new\\?type=web_single_page/);\n\n      await page.getByRole('button', { name: 'New Item' }).click();\n      await page.getByPlaceholder('https://example.com').fill('http://static-web-server/example-doc-1.html');\n\n      await page.getByRole('textbox', { name: 'Datasource Name' }).fill('Web Pages DataSource');\n\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n      await pollKbOverviewUntill(page, kbId, overview => overview.documents.total === 2);\n\n      // Check document exists\n      await page.getByRole('button', { name: /^Documents/ }).click();\n      await expect(page.getByRole('button', { name: 'Example Document 1' })).toBeVisible();\n      await page.getByRole('button', { name: /^Data Sources/ }).click();\n\n      
await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n    });\n\n    await test.step('Configure Sitemap Data Source', async () => {\n      await page.getByRole('button', { name: 'Select web sitemap.' }).click();\n      await page.waitForURL(/data-sources\\/new\\?type=web_sitemap/);\n\n      await page.getByRole('textbox', { name: 'Sitemap URL' }).fill('http://static-web-server/example-sitemap.xml');\n\n      await page.getByRole('textbox', { name: 'Datasource Name' }).fill('Web Sitemap DataSource');\n\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n      await pollKbOverviewUntill(page, kbId, overview => overview.documents.total === 4);\n\n      // Check document exists\n      await page.getByRole('button', { name: /^Documents/ }).click();\n      await expect(page.getByRole('button', { name: 'Example Document 1' })).toHaveCount(2); // Documents are not deduplicated.\n      await expect(page.getByRole('button', { name: 'Example Document 2' })).toBeVisible();\n      await page.getByRole('button', { name: /^Data Sources/ }).click();\n    });\n\n    test.slow();\n    await test.step('Check for index progress', async () => {\n      await pollKbOverviewUntill(page, kbId,\n        overview =>\n          overview.documents.total === 4\n          && overview.chunks.total === 4\n          && overview.vector_index.completed === 4\n          && overview.kg_index.completed === 4,\n      );\n    });\n  });\n\n  test('Delete Data Sources', async ({ page }) => {\n    const kbId = await createFeaturedKnowledgeBase(page, 'KnowledgeBase 2');\n    await configureSimpleDataSource(page, kbId);\n\n    await test.step('Delete Document', async () => {\n      await page.goto(`/knowledge-bases/${kbId}/data-sources`);\n      await page.getByRole('button', { name: 'Delete' }).click();\n      await page.getByRole('button', { name: 'Continue' }).click();\n      await 
page.getByRole('button', { name: 'Continue' }).waitFor({ state: 'detached' });\n      // FIXME: reload data sources after deletion\n      await pollKbOverviewUntill(page, kbId, overview => {\n        return overview.documents.total === 0 && overview.chunks.total === 0;\n      });\n    });\n\n    await test.step('Wait for documents and chunks to be deleted', async () => {\n      await pollKbOverviewUntill(page, kbId, overview => {\n        return overview.documents.total === 0 && overview.chunks.total === 0;\n      });\n    });\n  });\n\n  test('Delete Documents', async ({ page }) => {\n    const kbId = await createFeaturedKnowledgeBase(page, 'KnowledgeBase 3');\n    await configureSimpleDataSource(page, kbId);\n\n    await test.step('Delete Document', async () => {\n      await page.goto(`/knowledge-bases/${kbId}`);\n      // FIXME: add aria roles\n      await page.getByRole('button').filter({ has: page.locator('.lucide-ellipsis') }).click();\n      await page.getByRole('menuitem', { name: 'Delete' }).click();\n      await page.getByRole('button', { name: 'Continue' }).click();\n      await page.getByRole('button', { name: 'Continue' }).waitFor({ state: 'detached' });\n\n    });\n\n    await test.step('Wait for documents and chunks to be deleted', async () => {\n      await pollKbOverviewUntill(page, kbId, overview => {\n        return overview.documents.total === 0 && overview.chunks.total === 0;\n      });\n    });\n  });\n});\n\nasync function createFeaturedKnowledgeBase (page: Page, name: string, enableKnowledgeGraph = false) {\n  await loginViaApi(page);\n  return await test.step(`Create KnowledgeBase ${name} (kg_index ${enableKnowledgeGraph ? 
'enabled' : 'disabled'})`, async () => {\n    await test.step('Navigate to Create KnowledgeBase Page', async () => {\n      await page.goto('/knowledge-bases');\n      await page.getByRole('button', { name: 'New Knowledge Base' }).click();\n      await page.waitForURL('/knowledge-bases/new');\n    });\n\n    await test.step('Fill KnowledgeBase Form', async () => {\n      await page.getByRole('textbox', { name: 'Name' }).fill(name);\n      await page.getByRole('textbox', { name: 'Description' }).fill(`KnowledgeBase Description for ${name}`);\n\n      await selectOption(page, 'LLM', /My LLM/);\n      await selectOption(page, 'Embedding Model', /My Embedding Model/);\n\n      if (enableKnowledgeGraph) {\n        await turnSwitch(page, 'Knowledge Graph Index');\n      }\n    });\n\n    return await test.step('Create and jump to data sources page', async () => {\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n\n      const [, idString] = /\\/knowledge-bases\\/(\\d+)\\/data-sources/.exec(page.url());\n      return parseInt(idString);\n    });\n  });\n}\n\nasync function configureSimpleDataSource (page: Page, kbId: number, enableKnowledgeGraph = false) {\n  await test.step(`Configure simple data source`, async () => {\n    await test.step(`Upload simple file`, async () => {\n      await page.getByRole('button', { name: 'Upload Files' }).click();\n      await page.waitForURL(/data-sources\\/new\\?type=file/);\n\n      await page.setInputFiles('[name=files]', 'res/sample.pdf');\n\n      await page.getByRole('textbox', { name: 'Datasource Name' }).fill('Simple DataSource');\n\n      await page.getByRole('button', { name: 'Create' }).click();\n      await page.waitForURL(/\\/knowledge-bases\\/\\d+\\/data-sources/);\n\n    });\n\n    await test.step(`Wait for index progress`, async () => {\n      await pollKbOverviewUntill(page, kbId, overview => {\n        
expect(overview.documents.total).toBe(1);\n        return !!overview.vector_index.completed && (!enableKnowledgeGraph || !!overview.kg_index.completed);\n      });\n    });\n    return kbId;\n  });\n}\n\nasync function pollKbOverviewUntill (page: Page, kbId: number, isOk: (json: any) => boolean) {\n  await test.step('Poll kb overview api', async () => {\n    let i = 0;\n    while (true) {\n      const ok = await test.step(`Poll rounds ${++i}`, async () => {\n        await page.waitForTimeout(500);\n        const response = await page.request.get(`/api/v1/admin/knowledge_bases/${kbId}/overview`);\n        expect(response.ok()).toBe(true);\n        const overview = await response.json();\n        return isOk(overview);\n      });\n      if (ok) {\n        break;\n      }\n    }\n  });\n}\n"
  },
  {
    "path": "e2e/tests/site-settings.spec.ts",
    "content": "import { expect, test } from '@playwright/test';\nimport { loginViaApi } from '../utils/login';\n\ntest.describe('Site Settings', () => {\n  test('Basic Settings', async ({ page, browser, baseURL }) => {\n    const homePage = await test.step('Visit Settings Page', async () => {\n      await loginViaApi(page);\n      await page.goto('/site-settings');\n      const homePage = await browser.newPage({\n        baseURL,\n      });\n      await homePage.goto('/');\n      return homePage;\n    });\n\n    await test.step('Title and Description', async () => {\n      await page.getByLabel('Title', { exact: true }).fill('FooBar.AI');\n      await submitAndWaitSavedByLabel('Title');\n\n      await page.getByLabel('Description', { exact: true }).fill('FooBar AI Description');\n      await submitAndWaitSavedByLabel('Description');\n\n      await page.getByLabel('Homepage Title', { exact: true }).fill('Ask anything about FooBar');\n      await submitAndWaitSavedByLabel('Homepage Title');\n\n      await page.reload();\n\n      await homePage.waitForTimeout(7000); // wait for settings cache\n      await homePage.reload();\n      expect(await homePage.title()).toBe('FooBar.AI');\n      await expect(homePage.locator('h1')).toHaveText('Ask anything about FooBar');\n      await expect(homePage.locator('h1 + p')).toHaveText('FooBar AI Description');\n      await expect(homePage.locator('meta[name=description]')).toHaveAttribute('content', 'FooBar AI Description');\n    });\n\n    async function submitAndWaitSavedByLabel (label: string) {\n      const button = page.getByText(label, { exact: true }).locator('..').locator('..').getByRole('button', { name: 'Save', exact: true });\n\n      // Click the save button in the field form\n      await button.click();\n\n      // Wait for the save button to vanish. (Saved)\n      await button.waitFor({ state: 'hidden' });\n    }\n  });\n});\n"
  },
  {
    "path": "e2e/tests/widget.spec.ts",
    "content": "import { expect, type Locator, type Page, test } from '@playwright/test';\nimport { getChatRequestPromise, QUESTION, testNewChat } from '../utils/chat';\n\ntest('JS Widget', async ({ page }) => {\n  await page.goto('/');\n  await page.getByRole('button', { name: 'Ask AI' }).waitFor({ state: 'visible' });\n  expect(await page.evaluate('tidbai')).toMatchObject({ open: false });\n});\n\ntest('Embedded JS Widget with trigger button', async ({ page }) => {\n  const trigger = await test.step('Wait trigger visible and tidbai object ready', async () => {\n    await page.goto('http://localhost:4001/widget.html');\n    const trigger = page.getByRole('button', { name: 'Ask AI' });\n    await trigger.waitFor({ state: 'visible' });\n    expect(await page.evaluate('tidbai')).toMatchObject({ open: false });\n    return trigger;\n  });\n\n  const dialog = await test.step('Click and show dialog', async () => {\n    await trigger.click();\n\n    const dialog = page.getByRole('dialog', { name: 'Ask AI' });\n    await dialog.waitFor({ state: 'visible' });\n\n    return dialog;\n  });\n\n  await testWidgetChat(page, dialog);\n});\n\n// Used by docs.pingcap.com\ntest('Embedded JS Widget controlled by js', async ({ page }) => {\n  await test.step('Wait trigger visible and tidbai object ready', async () => {\n    await page.goto('http://localhost:4001/widget-controlled.html');\n    const trigger = page.getByRole('button', { name: 'Ask AI' });\n    await expect(trigger).toBeHidden();\n\n    await page.waitForFunction(() => (window as any).tidbai);\n    expect(await page.evaluate('window.tidbai')).toMatchObject({ open: false });\n  });\n\n  const dialog = await test.step('JS api call and show dialog', async () => {\n    await page.evaluate('tidbai.open = true');\n\n    const dialog = page.getByRole('dialog', { name: 'Ask AI' });\n    await dialog.waitFor({ state: 'visible' });\n\n    return dialog;\n  });\n\n  await testWidgetChat(page, dialog);\n});\n\nasync function 
testWidgetChat (page: Page, dialog: Locator) {\n  await test.step('Fill in question', async () => {\n    const input = dialog.getByPlaceholder('Input your question here...');\n    await input.focus();\n    await input.fill(QUESTION);\n  });\n\n  const chatRequestPromise = await test.step('Trigger ask by press ControlOrMeta+Enter', async () => {\n    const chatRequestPromise = getChatRequestPromise(page, 'http://127.0.0.1:3000');\n    await page.keyboard.press('ControlOrMeta+Enter');\n\n    return chatRequestPromise;\n  });\n\n  await testNewChat(page, chatRequestPromise, false);\n}"
  },
  {
    "path": "e2e/utils/chat.ts",
    "content": "import { expect, type Page, type Request, test } from '@playwright/test';\n\nexport const QUESTION = 'What is the content of sample.pdf?';\n\nexport function getChatRequestPromise (page: Page, baseURL: string) {\n  return page.waitForRequest(request => request.url() === `${baseURL}/api/v1/chats` && request.method() === 'POST');\n}\n\nexport async function testNewChat (page: Page, chatRequest: Request, validatePageUrlAndTitle: boolean, feedbackLike?: boolean) {\n  await test.step('Wait page changes', async () => {\n    if (validatePageUrlAndTitle) {\n      await page.waitForURL(/\\/c\\/.+/);\n      expect(await page.title()).toContain(QUESTION);\n    }\n    await page.getByRole('heading', { name: QUESTION }).waitFor({ state: 'visible' });\n  });\n\n  const streamText = await test.step('Wait for chat stop', async () => {\n    const chatResponse = await chatRequest.response();\n    expect(chatResponse.ok()).toBe(true);\n\n    // Feedback button indicates chat ends.\n    await page.getByRole('button', { name: 'Like This Answer', exact: true }).waitFor({ state: 'visible' });\n\n    return await chatResponse.text();\n  });\n\n  await test.step('Check response text', async () => {\n    const lastLine = streamText.split('\\n').filter(t => !!t.trim()).slice(-1)[0];\n    expect(lastLine).toMatch(/^2:/);\n    const message = JSON.parse(lastLine.slice(2))[0].assistant_message;\n\n    expect(message.finished_at).toBeTruthy();\n    expect(message.content.trim().length).toBeGreaterThan(0);\n  });\n\n  if (typeof feedbackLike === 'boolean') {\n    await test.step('Feedback', async () => {\n      const feedbackButton = page.getByRole('button', { name: feedbackLike ? 
'Like This Answer' : 'Dislike This Answer', exact: true });\n      await feedbackButton.click();\n      const dialog = page.getByRole('dialog', { name: 'Feedback' });\n\n      await dialog.waitFor({ state: 'visible' });\n      const comments = page.getByPlaceholder('Comments...');\n      await comments.pressSequentially(feedbackLike ? 'Good Good Good' : 'Bad Bad Bad');\n      await page.getByText('Add feedback', { exact: true }).click();\n\n      await dialog.waitFor({ state: 'hidden' });\n    });\n  }\n}\n"
  },
  {
    "path": "e2e/utils/forms.ts",
    "content": "import { expect, type Page, test } from '@playwright/test';\n\nexport async function selectOption (page: Page, name: string, value: string | RegExp, clickWindow = false) {\n  await test.step(`Select field ${name}`, async () => {\n    await page.getByRole('button', { name: name, exact: true }).click();\n    await page.getByRole('option', { name: value }).click();\n    if (clickWindow) {\n      await page.click('body');\n    }\n    await expect(page.getByRole('button', { name: name, exact: true })).toHaveText(value);\n  });\n}\n\nexport async function turnSwitch (page: Page, name: string, on: boolean = true) {\n  await test.step(`Turn ${on ? 'on' : 'off'} switch ${name}`, async () => {\n    const locator = page.getByRole('switch', { name: name, exact: true });\n    if (on) {\n      if (await locator.getAttribute('aria-checked') === 'true') {\n        return;\n      }\n      await locator.click();\n      await expect(locator).toHaveAttribute('aria-checked', 'true');\n    } else {\n      if (await locator.getAttribute('aria-checked') === 'false') {\n        return;\n      }\n      await locator.click();\n      await expect(locator).toHaveAttribute('aria-checked', 'false');\n    }\n  });\n}\n\nexport async function checkCheckbox (page: Page, name: string, on: boolean = true) {\n  await test.step(`${on ? 'Check' : 'Uncheck'} checkbox ${name}`, async () => {\n    const locator = page.getByRole('checkbox', { name: name, exact: true });\n    if (on) {\n      await locator.check();\n    } else {\n      await locator.uncheck();\n    }\n  });\n}\n"
  },
  {
    "path": "e2e/utils/login.ts",
    "content": "import { type APIRequestContext, expect, test } from '@playwright/test';\n\nexport async function loginViaApi ({ request }: { request: APIRequestContext }) {\n  await test.step('Login via API', async () => {\n    const usp = new URLSearchParams();\n    usp.set('username', process.env.USERNAME);\n    usp.set('password', process.env.PASSWORD);\n\n    const response = await request.post('/api/v1/auth/login', {\n      data: usp.toString(),\n      headers: {\n        'Content-Type': 'application/x-www-form-urlencoded',\n      },\n    });\n\n    expect(response.ok()).toBe(true);\n  });\n}\n"
  },
  {
    "path": "e2e/vercel.json",
    "content": "{\n  \"outputDirectory\": \"playwright-report\"\n}\n"
  },
  {
    "path": "frontend/.gitignore",
    "content": "node_modules\n.idea\n"
  },
  {
    "path": "frontend/.nvmrc",
    "content": "v22.12.0\n"
  },
  {
    "path": "frontend/.prettierignore",
    "content": "app/src/components/ui/**"
  },
  {
    "path": "frontend/Dockerfile",
    "content": "FROM node:20-alpine AS base\n\n# 1. Install dependencies only when needed\nFROM base AS deps\n# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.\nRUN apk add --no-cache libc6-compat\n\nWORKDIR /tidb.ai\n\nRUN mkdir -p frontend/patches\nRUN mkdir -p frontend/app\nRUN mkdir -p frontend/packages/widget-react\n\n# Install dependencies based on the preferred package manager\nCOPY frontend/package.json ./frontend\nCOPY frontend/pnpm-*.yaml ./frontend\nCOPY frontend/patches/* ./frontend/patches\nCOPY frontend/app/package.json ./frontend/app\nCOPY frontend/packages/widget-react/package.json ./frontend/packages/widget-react\n\nWORKDIR /tidb.ai/frontend\n\nRUN npm install -g corepack@latest\nRUN corepack enable pnpm\nRUN pnpm i --frozen-lockfile\n\n\n# 2. Rebuild the source code only when needed\nFROM base AS builder\n# Need git client to fetch git revision info\nRUN apk add --no-cache git\nWORKDIR /tidb.ai\nCOPY --from=deps /tidb.ai/frontend/node_modules ./frontend/node_modules\nCOPY --from=deps /tidb.ai/frontend/app/node_modules ./frontend/app/node_modules\nCOPY --from=deps /tidb.ai/frontend/packages/widget-react/node_modules ./frontend/packages/widget-react/node_modules\n# For symbol link of root README.md\nCOPY . .\n\n# This will do the trick, use the corresponding env file for each environment.\n#COPY .env.production.sample .env.production\n\nWORKDIR /tidb.ai/frontend\n\nENV BASE_URL=\"\"\nENV SITE_URL=\"\"\n\nRUN rm -f app/.env\nRUN echo BASE_URL=${BASE_URL:-'\"\"'} >> app/.env.production\n\nRUN npm install -g corepack@latest\nRUN corepack enable pnpm\nRUN pnpm run build:docker\nRUN pnpm run build:widget-react\n\n# 3. 
Production image, copy all the files and run next\nFROM base AS runner\nWORKDIR /tidb.ai\n\nENV NODE_ENV=production\nENV PORT=3000\nENV HOSTNAME=0.0.0.0\n\nRUN addgroup -g 1001 -S nodejs\nRUN adduser -S nextjs -u 1001\n\n# Automatically leverage output traces to reduce image size\n# https://nextjs.org/docs/advanced-features/output-file-tracing\nCOPY --from=builder --chown=nextjs:nodejs /tidb.ai/frontend/app/.next/standalone .\nCOPY --from=builder --chown=nextjs:nodejs /tidb.ai/frontend/app/.next/static app/.next/static\nCOPY --from=builder /tidb.ai/frontend/app/public app/public\n\nUSER nextjs\n\nEXPOSE 3000\n\nCMD [\"node\", \"app/server.js\"]\n"
  },
  {
    "path": "frontend/app/.eslintrc.json",
    "content": "{\n  \"extends\": [\n    \"next/core-web-vitals\",\n    \"plugin:storybook/recommended\"\n  ]\n}\n"
  },
  {
    "path": "frontend/app/.gitignore",
    "content": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pnp\n.pnp.js\n.yarn/install-state.gz\n\n# testing\n/coverage\n\n# next.js\n/.next/\n/out/\n\n# production\n/build\n\n# misc\n.DS_Store\n*.pem\n\n# debug\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\n\n# local env files\n.env*.local\n\n# vercel\n.vercel\n\n# typescript\n*.tsbuildinfo\nnext-env.d.ts\n\ncertificates\n\npublic/widget.js\n\npublic/robots.txt\npublic/sitemap.xml\npublic/sitemap-*.xml\n\n*storybook.log\nstorybook-static\n"
  },
  {
    "path": "frontend/app/.storybook/main.ts",
    "content": "import type { StorybookConfig } from '@storybook/nextjs';\nimport MonacoEditorWebpackPlugin = require('monaco-editor-webpack-plugin');\n\nconst config: StorybookConfig = {\n  stories: [\n    '../src/!(pages)/**/*.stories.@(js|jsx|mjs|ts|tsx|mdx)',\n  ],\n  addons: [\n    '@storybook/addon-onboarding',\n    '@storybook/addon-links',\n    '@storybook/addon-essentials',\n    '@chromatic-com/storybook',\n    '@storybook/addon-interactions',\n  ],\n  framework: {\n    name: '@storybook/nextjs',\n    options: {},\n  },\n  webpack (config) {\n    config.plugins.push(new MonacoEditorWebpackPlugin({\n      languages: ['json'],\n      filename: 'static/[name].worker.js',\n    }));\n    return config;\n  },\n  staticDirs: ['../public'],\n};\nexport default config;\n"
  },
  {
    "path": "frontend/app/.storybook/preview.ts",
    "content": "import type { Preview } from \"@storybook/react\";\nimport '../src/app/globals.css';\nimport '../src/app/chart-theme.css';\n\nconst preview: Preview = {\n  parameters: {\n    controls: {\n      matchers: {\n        color: /(background|color)$/i,\n        date: /Date$/i,\n      },\n    },\n  },\n};\n\nexport default preview;\n"
  },
  {
    "path": "frontend/app/README.md",
    "content": "This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).\n\n## Getting Started\n\nFirst, run the development server:\n\n```bash\nnpm run dev\n# or\nyarn dev\n# or\npnpm dev\n# or\nbun dev\n```\n\nOpen [http://localhost:3000](http://localhost:3000) with your browser to see the result.\n\nYou can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.\n\nThis project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.\n\n## Learn More\n\nTo learn more about Next.js, take a look at the following resources:\n\n- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.\n- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.\n\nYou can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!\n\n## Deploy on Vercel\n\nThe easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.\n\nCheck out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.\n"
  },
  {
    "path": "frontend/app/components.json",
    "content": "{\n  \"$schema\": \"https://ui.shadcn.com/schema.json\",\n  \"style\": \"default\",\n  \"rsc\": true,\n  \"tsx\": true,\n  \"tailwind\": {\n    \"config\": \"tailwind.config.ts\",\n    \"css\": \"src/app/globals.css\",\n    \"baseColor\": \"zinc\",\n    \"cssVariables\": true,\n    \"prefix\": \"\"\n  },\n  \"aliases\": {\n    \"components\": \"@/components\",\n    \"utils\": \"@/lib/utils\"\n  }\n}\n"
  },
  {
    "path": "frontend/app/jest.config.ts",
    "content": "/**\n * For a detailed explanation regarding each configuration property, visit:\n * https://jestjs.io/docs/configuration\n */\n\nimport type { Config } from 'jest';\nimport nextJest from 'next/jest';\n\nconst createJestConfig = nextJest({\n  // Provide the path to your Next.js app to load next.config.js and .env files in your test environment\n  dir: './',\n});\n\nconst config: Config = {\n  extensionsToTreatAsEsm: ['.ts', '.tsx'],\n\n  // All imported modules in your tests should be mocked automatically\n  // automock: false,\n\n  // Stop running tests after `n` failures\n  // bail: 0,\n\n  // The directory where Jest should store its cached dependency information\n  // cacheDirectory: \"/private/var/folders/46/7yp8x0hd3rs0j60c_ymsjgpc0000gn/T/jest_dx\",\n\n  // Automatically clear mock calls, instances, contexts and results before every test\n  clearMocks: true,\n\n  // Indicates whether the coverage information should be collected while executing the test\n  collectCoverage: true,\n\n  // An array of glob patterns indicating a set of files for which coverage information should be collected\n  // collectCoverageFrom: undefined,\n\n  // The directory where Jest should output its coverage files\n  coverageDirectory: 'coverage',\n\n  // An array of regexp pattern strings used to skip coverage collection\n  // coveragePathIgnorePatterns: [\n  //   \"/node_modules/\"\n  // ],\n\n  // Indicates which provider should be used to instrument code for coverage\n  coverageProvider: 'v8',\n\n  // A list of reporter names that Jest uses when writing coverage reports\n  // coverageReporters: [\n  //   \"json\",\n  //   \"text\",\n  //   \"lcov\",\n  //   \"clover\"\n  // ],\n\n  // An object that configures minimum threshold enforcement for coverage results\n  // coverageThreshold: undefined,\n\n  // A path to a custom dependency extractor\n  // dependencyExtractor: undefined,\n\n  // Make calling deprecated APIs throw helpful error messages\n  // 
errorOnDeprecated: false,\n\n  // The default configuration for fake timers\n  // fakeTimers: {\n  //   \"enableGlobally\": false\n  // },\n\n  // Force coverage collection from ignored files using an array of glob patterns\n  // forceCoverageMatch: [],\n\n  // A path to a module which exports an async function that is triggered once before all test suites\n  // globalSetup: undefined,\n\n  // A path to a module which exports an async function that is triggered once after all test suites\n  // globalTeardown: undefined,\n\n  // A set of global variables that need to be available in all test environments\n  // globals: {},\n\n  // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.\n  // maxWorkers: \"50%\",\n\n  // An array of directory names to be searched recursively up from the requiring module's location\n  // moduleDirectories: [\n  //   \"node_modules\"\n  // ],\n\n  // An array of file extensions your modules use\n  // moduleFileExtensions: [\n  //   \"js\",\n  //   \"mjs\",\n  //   \"cjs\",\n  //   \"jsx\",\n  //   \"ts\",\n  //   \"tsx\",\n  //   \"json\",\n  //   \"node\"\n  // ],\n\n  // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module\n  // moduleNameMapper: {},\n\n  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader\n  // modulePathIgnorePatterns: [],\n\n  // Activates notifications for test results\n  // notify: false,\n\n  // An enum that specifies notification mode. 
Requires { notify: true }\n  // notifyMode: \"failure-change\",\n\n  // A preset that is used as a base for Jest's configuration\n  // preset: undefined,\n\n  // Run tests from one or more projects\n  // projects: undefined,\n\n  // Use this configuration option to add custom reporters to Jest\n  // reporters: undefined,\n\n  // Automatically reset mock state before every test\n  // resetMocks: false,\n\n  // Reset the module registry before running each individual test\n  // resetModules: false,\n\n  // A path to a custom resolver\n  // resolver: undefined,\n\n  // Automatically restore mock state and implementation before every test\n  // restoreMocks: false,\n\n  // The root directory that Jest should scan for tests and modules within\n  // rootDir: undefined,\n\n  // A list of paths to directories that Jest should use to search for files in\n  // roots: [\n  //   \"<rootDir>\"\n  // ],\n\n  // Allows you to use a custom runner instead of Jest's default test runner\n  // runner: \"jest-runner\",\n\n  // The paths to modules that run some code to configure or set up the testing environment before each test\n  setupFiles: ['<rootDir>/jest.polyfills.js'],\n\n  // A list of paths to modules that run some code to configure or set up the testing framework before each test\n  // setupFilesAfterEnv: [],\n\n  // The number of seconds after which a test is considered as slow and reported as such in the results.\n  // slowTestThreshold: 5,\n\n  // A list of paths to snapshot serializer modules Jest should use for snapshot testing\n  // snapshotSerializers: [],\n\n  // The test environment that will be used for testing\n  testEnvironment: 'jsdom',\n\n  // Options that will be passed to the testEnvironment\n  // testEnvironmentOptions: {},\n\n  // Adds a location field to test results\n  // testLocationInResults: false,\n\n  // The glob patterns Jest uses to detect test files\n  testMatch: [\n    \"**/__tests__/**/*.[jt]s?(x)\",\n    \"**/?(*.)+(spec|test).[tj]s?(x)\"\n  
],\n\n  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped\n  // testPathIgnorePatterns: [\n  //   \"/node_modules/\"\n  // ],\n\n  // The regexp pattern or array of patterns that Jest uses to detect test files\n  // testRegex: [],\n\n  // This option allows the use of a custom results processor\n  // testResultsProcessor: undefined,\n\n  // This option allows use of a custom test runner\n  // testRunner: \"jest-circus/runner\",\n\n  // A map from regular expressions to paths to transformers\n  transform: {\n    '\\\\.ya?ml$': 'jest-transform-yaml',\n    '^.+\\\\.tsx?$': [\n      'ts-jest',\n      {\n        useESM: true,\n      },\n    ],\n  },\n\n  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation\n  transformIgnorePatterns: [\n    '/node_modules/',\n    '\\\\.pnp\\\\.[^\\\\/]+$',\n    // 'node_modules/(?!(rehype-react))',\n  ],\n\n  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them\n  // unmockedModulePathPatterns: undefined,\n\n  // Indicates whether each individual test should be reported during the run\n  // verbose: undefined,\n\n  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode\n  // watchPathIgnorePatterns: [],\n\n  // Whether to use watchman for file crawling\n  // watchman: true,\n};\n\nexport default createJestConfig(config);\n"
  },
  {
    "path": "frontend/app/jest.polyfills.js",
    "content": "// jest.polyfills.js\n/**\n * @note The block below contains polyfills for Node.js globals\n * required for Jest to function when running JSDOM tests.\n * These HAVE to be require's and HAVE to be in this exact\n * order, since \"undici\" depends on the \"TextEncoder\" global API.\n *\n * Consider migrating to a more modern test runner if\n * you don't want to deal with this.\n */\n\nconst { TextDecoder, TextEncoder } = require(\"node:util\");\nconst { ReadableStream, TransformStream } = require('node:stream/web')\n\nObject.defineProperties(globalThis, {\n  TextDecoder: { value: TextDecoder },\n  TextEncoder: { value: TextEncoder },\n  ReadableStream: { value: ReadableStream },\n  TransformStream: { value: TransformStream },\n})\n\nconst { Blob, File } = require(\"node:buffer\")\nconst { fetch, Headers, FormData, Request, Response } = require(\"undici\")\n\nObject.defineProperties(globalThis, {\n  fetch: { value: fetch, writable: true },\n  Blob: { value: Blob },\n  File: { value: File },\n  Headers: { value: Headers },\n  FormData: { value: FormData },\n  Request: { value: Request },\n  Response: { value: Response },\n})\n\nclass ResizeObserver {\n  observe() {\n  }\n\n  disconnect() {\n  }\n}\n\nObject.defineProperties(globalThis, {\n  ResizeObserver: { value: ResizeObserver }\n})\n"
  },
  {
    "path": "frontend/app/next-sitemap.config.js",
    "content": "module.exports = {\n  siteUrl: process.env.SITE_URL || 'https://tidb.ai',\n  generateRobotsTxt: true,// (optional)\n  // ...other options\n}\n"
  },
  {
    "path": "frontend/app/next.config.ts",
    "content": "import MonacoWebpackPlugin from 'monaco-editor-webpack-plugin';\nimport { NextConfig } from 'next';\n\nconst nextConfig: NextConfig = {\n  output: process.env.STANDALONE ? 'standalone' : undefined,\n  transpilePackages: ['monaco-editor'],\n  experimental: {\n    optimizePackageImports: ['ai', 'lucide-react'],\n    turbo: {\n      rules: {\n        '*.svg': {\n          loaders: ['@svgr/webpack'],\n          as: '*.js',\n        },\n      },\n    },\n  },\n  webpack (config, options) {\n    config.module.rules.push({\n      test: /\\.svg$/,\n      use: '@svgr/webpack',\n    });\n    if (!options.isServer) {\n      config.plugins.push(new MonacoWebpackPlugin({\n        languages: ['json', 'markdown'],\n        filename: 'static/[name].worker.js',\n      }));\n    }\n    return config;\n  },\n};\n\nexport default nextConfig;\n"
  },
  {
    "path": "frontend/app/notice.md",
    "content": "Deprecating `react-hook-form`. Use `@tanstack/react-form` instead."
  },
  {
    "path": "frontend/app/package.json",
    "content": "{\n  \"name\": \"app\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"scripts\": {\n    \"dev\": \"next dev\",\n    \"dev:local\": \"next dev\",\n    \"build\": \"next build && next-sitemap\",\n    \"build:standalone\": \"STANDALONE=1 next build --no-lint && next-sitemap\",\n    \"start\": \"next start\",\n    \"lint\": \"next lint\",\n    \"test\": \"NODE_OPTIONS=\\\"$NODE_OPTIONS --experimental-vm-modules\\\" jest\",\n    \"storybook\": \"storybook dev -p 6006\",\n    \"build-storybook\": \"storybook build\"\n  },\n  \"dependencies\": {\n    \"@hookform/resolvers\": \"^3.9.1\",\n    \"@next/third-parties\": \"15.1.9\",\n    \"@radix-ui/react-accordion\": \"^1.2.3\",\n    \"@radix-ui/react-alert-dialog\": \"^1.1.6\",\n    \"@radix-ui/react-aspect-ratio\": \"^1.1.2\",\n    \"@radix-ui/react-avatar\": \"^1.1.3\",\n    \"@radix-ui/react-checkbox\": \"^1.1.4\",\n    \"@radix-ui/react-collapsible\": \"^1.1.3\",\n    \"@radix-ui/react-context-menu\": \"^2.2.6\",\n    \"@radix-ui/react-dialog\": \"^1.1.6\",\n    \"@radix-ui/react-dropdown-menu\": \"^2.1.6\",\n    \"@radix-ui/react-hover-card\": \"^1.1.6\",\n    \"@radix-ui/react-label\": \"^2.1.2\",\n    \"@radix-ui/react-menubar\": \"^1.1.6\",\n    \"@radix-ui/react-navigation-menu\": \"^1.2.5\",\n    \"@radix-ui/react-popover\": \"^1.1.6\",\n    \"@radix-ui/react-progress\": \"^1.1.2\",\n    \"@radix-ui/react-radio-group\": \"^1.2.3\",\n    \"@radix-ui/react-scroll-area\": \"^1.2.3\",\n    \"@radix-ui/react-select\": \"^2.1.6\",\n    \"@radix-ui/react-separator\": \"^1.1.2\",\n    \"@radix-ui/react-slider\": \"^1.2.3\",\n    \"@radix-ui/react-slot\": \"^1.1.2\",\n    \"@radix-ui/react-switch\": \"^1.1.3\",\n    \"@radix-ui/react-tabs\": \"^1.1.3\",\n    \"@radix-ui/react-toast\": \"^1.2.6\",\n    \"@radix-ui/react-toggle\": \"^1.1.2\",\n    \"@radix-ui/react-toggle-group\": \"^1.1.2\",\n    \"@radix-ui/react-tooltip\": \"^1.1.8\",\n    \"@tailwindcss/typography\": \"^0.5.15\",\n    
\"class-variance-authority\": \"^0.7.1\",\n    \"clsx\": \"^2.1.1\",\n    \"cmdk\": \"^1.0.4\",\n    \"date-fns\": \"^3.6.0\",\n    \"embla-carousel-react\": \"^8.5.1\",\n    \"force-graph\": \"^1.49.6\",\n    \"input-otp\": \"^1.4.1\",\n    \"js-cookie\": \"^3.0.5\",\n    \"lucide-react\": \"^0.400.0\",\n    \"merge-refs\": \"^1.3.0\",\n    \"next\": \"15.1.11\",\n    \"next-themes\": \"^0.4.4\",\n    \"react\": \"19.0.0\",\n    \"react-day-picker\": \"^8.10.1\",\n    \"react-dom\": \"19.0.0\",\n    \"react-hook-form\": \"^7.54.0\",\n    \"react-is\": \"19.0.0\",\n    \"react-resizable-panels\": \"^2.1.7\",\n    \"recharts\": \"^2.15.1\",\n    \"rehype-highlight\": \"^7.0.1\",\n    \"rehype-react\": \"^8.0.0\",\n    \"remark-gfm\": \"^4.0.0\",\n    \"remark-parse\": \"^11.0.0\",\n    \"remark-rehype\": \"^11.1.1\",\n    \"sonner\": \"^1.7.1\",\n    \"tailwind-merge\": \"^2.5.5\",\n    \"tailwindcss-animate\": \"^1.0.7\",\n    \"unified\": \"^11.0.5\",\n    \"vaul\": \"^1.1.1\",\n    \"zod\": \"^3.24.0\"\n  },\n  \"devDependencies\": {\n    \"@chromatic-com/storybook\": \"^1.9.0\",\n    \"@jest/globals\": \"^29.7.0\",\n    \"@storybook/addon-essentials\": \"^8.4.7\",\n    \"@storybook/addon-interactions\": \"^8.4.7\",\n    \"@storybook/addon-links\": \"^8.4.7\",\n    \"@storybook/addon-onboarding\": \"^8.4.7\",\n    \"@storybook/blocks\": \"^8.4.7\",\n    \"@storybook/nextjs\": \"^8.4.7\",\n    \"@storybook/react\": \"^8.4.7\",\n    \"@storybook/test\": \"^8.4.7\",\n    \"@svgr/webpack\": \"^8.1.0\",\n    \"@tanstack/react-form\": \"^0.40.4\",\n    \"@tanstack/react-table\": \"^8.20.5\",\n    \"@tanstack/table-core\": \"^8.20.5\",\n    \"@tanstack/zod-form-adapter\": \"^0.40.4\",\n    \"@testing-library/jest-dom\": \"^6.6.3\",\n    \"@testing-library/react\": \"^16.1.0\",\n    \"@types/d3\": \"^7.4.3\",\n    \"@types/gtag.js\": \"^0.0.20\",\n    \"@types/is-hotkey\": \"^0.1.10\",\n    \"@types/jest\": \"^29.5.14\",\n    \"@types/js-cookie\": \"^3.0.6\",\n    
\"@types/mdast\": \"^4.0.4\",\n    \"@types/node\": \"^20.17.9\",\n    \"@types/react\": \"19.0.1\",\n    \"@types/react-dom\": \"19.0.2\",\n    \"@types/react-is\": \"^19.0.0\",\n    \"@types/wcwidth\": \"^1.0.2\",\n    \"@uiw/react-json-view\": \"2.0.0-alpha.30\",\n    \"ai\": \"^3.4.33\",\n    \"change-case-all\": \"^2.1.0\",\n    \"copy-to-clipboard\": \"^3.3.3\",\n    \"d3\": \"^7.9.0\",\n    \"eslint\": \"^9.16.0\",\n    \"eslint-config-next\": \"15.1.9\",\n    \"eslint-plugin-storybook\": \"^0.11.1\",\n    \"eventemitter3\": \"^5.0.1\",\n    \"fast-equals\": \"^5.0.1\",\n    \"filesize\": \"^10.1.6\",\n    \"framer-motion\": \"^11.18.2\",\n    \"git-revision-webpack-plugin\": \"^5.0.0\",\n    \"hastscript\": \"^9.0.0\",\n    \"highlight.js\": \"^11.10.0\",\n    \"is-hotkey\": \"^0.2.0\",\n    \"jest\": \"^29.7.0\",\n    \"jest-environment-jsdom\": \"^29.7.0\",\n    \"jest-transform-yaml\": \"^1.1.2\",\n    \"monaco-editor\": \"^0.50.0\",\n    \"monaco-editor-webpack-plugin\": \"^7.1.0\",\n    \"next-sitemap\": \"^4.2.3\",\n    \"postcss\": \"^8.4.49\",\n    \"react-textarea-autosize\": \"^8.5.7\",\n    \"remark-directive\": \"^3.0.0\",\n    \"sass\": \"^1.82.0\",\n    \"sql-formatter\": \"^15.4.6\",\n    \"storybook\": \"^8.4.7\",\n    \"swr\": \"^2.2.5\",\n    \"tailwindcss\": \"^3.4.16\",\n    \"ts-jest\": \"^29.2.5\",\n    \"ts-node\": \"^10.9.2\",\n    \"typescript\": \"^5.7.2\",\n    \"undici\": \"^6.21.0\",\n    \"unist-util-visit\": \"^5.0.0\",\n    \"wcwidth\": \"^1.0.1\",\n    \"yaml-loader\": \"^0.8.1\"\n  },\n  \"imports\": {\n    \"#*\": {\n      \"react-server\": \"./src/*.react-server.ts\",\n      \"storybook\": \"./src/*.mock.ts\",\n      \"tidbai-widget\": \"./src/*.tidbai-widget.ts\",\n      \"default\": \"./src/*.ts\"\n    }\n  }\n}\n"
  },
  {
    "path": "frontend/app/postcss.config.mjs",
    "content": "/** @type {import('postcss-load-config').Config} */\nconst config = {\n  plugins: {\n    tailwindcss: {},\n  },\n};\n\nexport default config;\n"
  },
  {
    "path": "frontend/app/public/chats.mock.txt",
    "content": "0:\"\"\n2:[{\"chat\":{\"origin\": null,\"updated_at\":\"2024-07-09T09:55:30\",\"id\":\"00000000-0000-0000-0000-00000000000\",\"title\":\"what is tidb?\",\"engine_id\":1,\"engine_options\":\"{\\\"llm\\\":{\\\"provider\\\":\\\"openai\\\",\\\"openai_chat_model\\\":\\\"gpt-4o\\\",\\\"gemini_chat_model\\\":\\\"models/gemini-1.5-flash\\\",\\\"reranker_provider\\\":\\\"jinaai\\\",\\\"reranker_top_k\\\":10,\\\"intent_graph_knowledge\\\":\\\"Given a list of relationships of a knowledge graph as follows. When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\nKnowledge sub-queries:\\\\n\\\\n{% for sub_query, data in sub_queries.items() %}\\\\n\\\\nSub-query: {{ sub_query }}\\\\n\\\\n  - Entities:\\\\n\\\\n{% for entity in data['entities'] %}\\\\n\\\\n    - Name: {{ entity.name }}\\\\n    - Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n  - Relationships:\\\\n\\\\n{% for relationship in data['relationships'] %}\\\\n\\\\n    - Description: {{ relationship.rag_description }}\\\\n    - Last Modified At: {{ relationship.last_modified_at }}\\\\n    - Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\\\\n{% endfor %}\\\\n\\\",\\\"normal_graph_knowledge\\\":\\\"Given a list of relationships of a knowledge graph as follows. 
When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\n---------------------\\\\nEntities:\\\\n\\\\n{% for entity in entities %}\\\\n\\\\n- Name: {{ entity.name }}\\\\n- Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n---------------------\\\\n\\\\nKnowledge relationships:\\\\n\\\\n{% for relationship in relationships %}\\\\n\\\\n- Description: {{ relationship.rag_description }}\\\\n- Weight: {{ relationship.weight }}\\\\n- Last Modified At: {{ relationship.last_modified_at }}\\\\n- Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\\\"},\\\"knowledge_graph\\\":{\\\"enabled\\\":true,\\\"depth\\\":2,\\\"include_meta\\\":true,\\\"with_degree\\\":false,\\\"using_intent_search\\\":true}}\",\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"created_at\":\"2024-07-09T09:55:30\",\"deleted_at\":null},\"user_message\":{\"id\":120007,\"created_at\":\"2024-07-09T09:55:31\",\"role\":\"user\",\"trace_url\":null,\"finished_at\":null,\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"updated_at\":\"2024-07-09T09:55:31\",\"ordinal\":1,\"content\":\"what is tidb?\",\"error\":null,\"sources\":[],\"chat_id\":\"00000000-0000-0000-0000-00000000000\"},\"assistant_message\":{\"id\":120008,\"created_at\":\"2024-07-09T09:55:31\",\"role\":\"assistant\",\"trace_url\":\"https://us.cloud.langfuse.com/trace/ccf8aea4-086b-4d72-a7a1-a05ef2e6830a\",\"finished_at\":null,\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"updated_at\":\"2024-07-09T09:55:31\",\"ordinal\":2,\"content\":\"\",\"error\":null,\"sources\":[],\"chat_id\":\"00000000-0000-0000-0000-00000000000\"}}]\n8:[{\"state\":\"TRACE\",\"display\":\"Start knowledge graph searching ...\",\"context\":{\"langfuse_url\":\"https://us.cloud.langfuse.com/trace/ccf8aea4-086b-4d72-a7a1-a05ef2e6830a\"}}]\n8:[{\"state\":\"REFINE_QUESTION\",\"display\":\"Refine the user question 
...\",\"context\":\"\"}]\n8:[{\"state\":\"SEARCH_RELATED_DOCUMENTS\",\"display\":\"Search related documents ...\",\"context\":\"\"}]\n8:[{\"state\":\"SOURCE_NODES\",\"display\":\"\",\"context\":[{\"id\":8600,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.5/tidb-faq\"},{\"id\":9057,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidbcloud/tidb-architecture\"},{\"id\":9373,\"name\":\"TiDB Introduction\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/overview\"},{\"id\":9865,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/tidb-architecture\"},{\"id\":10191,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/tidb-faq\"},{\"id\":11370,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v6.5/tidb-faq\"},{\"id\":12985,\"name\":\"TiDB Introduction\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.4/overview\"},{\"id\":13942,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.3/tidb-architecture\"},{\"id\":14218,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.3/tidb-faq\"}]}]\n0:\"\"\n0:\"###\"\n0:\" Comprehensive\"\n0:\" Overview\"\n0:\" of\"\n0:\" Ti\"\n0:\"DB\"\n0:\"\\n\\n\"\n0:\"####\"\n0:\" What\"\n0:\" is\"\n0:\" Ti\"\n0:\"DB\"\n0:\"?\\n\\n\"\n0:\"Ti\"\n0:\"DB\"\n0:\" is\"\n0:\" an\"\n0:\" open\"\n0:\"-source\"\n0:\" distributed\"\n0:\" SQL\"\n0:\" database\"\n0:\" designed\"\n0:\" to\"\n0:\" support\"\n0:\" Hybrid\"\n0:\" Transaction\"\n0:\"al\"\n0:\" and\"\n0:\" Analytical\"\n0:\" Processing\"\n0:\" (\"\n0:\"HT\"\n0:\"AP\"\n0:\")\"\n0:\" workloads\"\n0:\".\"\n0:\" It\"\n0:\" is\"\n0:\" My\"\n0:\"SQL\"\n0:\"-compatible\"\n0:\" and\"\n0:\" features\"\n0:\" horizontal\"\n0:\" scalability\"\n0:\",\"\n0:\" strong\"\n0:\" consistency\"\n0:\",\"\n0:\" and\"\n0:\" high\"\n0:\" availability\"\n0:\"[^\"\n0:\"1\"\n0:\"][\"\n0:\"^\"\n0:\"2\"\n0:\"].\"\n0:\" Ti\"\n0:\"DB\"\n0:\" aims\"\n0:\" to\"\n0:\" provide\"\n0:\" 
a\"\n0:\" one\"\n0:\"-stop\"\n0:\" database\"\n0:\" solution\"\n0:\" that\"\n0:\" covers\"\n0:\" OL\"\n0:\"TP\"\n0:\" (\"\n0:\"Online\"\n0:\" Transaction\"\n0:\"al\"\n0:\" Processing\"\n0:\"),\"\n0:\" OL\"\n0:\"AP\"\n0:\" (\"\n0:\"Online\"\n0:\" Analytical\"\n0:\" Processing\"\n0:\"),\"\n0:\" and\"\n0:\" HT\"\n0:\"AP\"\n0:\" services\"\n0:\"[^\"\n0:\"3\"\n0:\"].\\n\\n\"\n0:\"####\"\n0:\" Architecture\"\n0:\"\\n\\n\"\n0:\"The\"\n0:\" Ti\"\n0:\"DB\"\n0:\" cluster\"\n0:\" consists\"\n0:\" of\"\n0:\" three\"\n0:\" main\"\n0:\" components\"\n0:\"[^\"\n0:\"4\"\n0:\"]:\\n\\n\"\n0:\"1\"\n0:\".\"\n0:\" **\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Server\"\n0:\"**\"\n0:\":\"\n0:\" Handles\"\n0:\" SQL\"\n0:\" parsing\"\n0:\",\"\n0:\" query\"\n0:\" planning\"\n0:\",\"\n0:\" and\"\n0:\" execution\"\n0:\".\\n\"\n0:\"2\"\n0:\".\"\n0:\" **\"\n0:\"Ti\"\n0:\"KV\"\n0:\" Server\"\n0:\"**\"\n0:\":\"\n0:\" Acts\"\n0:\" as\"\n0:\" the\"\n0:\" distributed\"\n0:\" key\"\n0:\"-value\"\n0:\" storage\"\n0:\" engine\"\n0:\",\"\n0:\" storing\"\n0:\" the\"\n0:\" actual\"\n0:\" data\"\n0:\".\\n\"\n0:\"3\"\n0:\".\"\n0:\" **\"\n0:\"PD\"\n0:\" (\"\n0:\"Placement\"\n0:\" Driver\"\n0:\")\"\n0:\" Server\"\n0:\"**\"\n0:\":\"\n0:\" Man\"\n0:\"ages\"\n0:\" cluster\"\n0:\" metadata\"\n0:\",\"\n0:\" alloc\"\n0:\"ates\"\n0:\" timestamps\"\n0:\",\"\n0:\" and\"\n0:\" handles\"\n0:\" data\"\n0:\" placement\"\n0:\" and\"\n0:\" load\"\n0:\" balancing\"\n0:\".\\n\\n\"\n0:\"Additionally\"\n0:\",\"\n0:\" Ti\"\n0:\"DB\"\n0:\" includes\"\n0:\" other\"\n0:\" components\"\n0:\" such\"\n0:\" as\"\n0:\" Ti\"\n0:\"Flash\"\n0:\" for\"\n0:\" column\"\n0:\"ar\"\n0:\" storage\"\n0:\",\"\n0:\" Ti\"\n0:\"Spark\"\n0:\" for\"\n0:\" Spark\"\n0:\" integration\"\n0:\",\"\n0:\" Ti\"\n0:\"DB\"\n0:\" Bin\"\n0:\"log\"\n0:\" for\"\n0:\" data\"\n0:\" replication\"\n0:\",\"\n0:\" and\"\n0:\" Ti\"\n0:\"DB\"\n0:\" Lightning\"\n0:\" for\"\n0:\" high\"\n0:\"-performance\"\n0:\" data\"\n0:\" import\"\n0:\"[^\"\n0:\"5\"\n0:\"].\\n\\n\"\n0:\"####\"\n0:\" 
Key\"\n0:\" Features\"\n0:\"\\n\\n\"\n0:\"1\"\n0:\".\"\n0:\" **\"\n0:\"Sc\"\n0:\"al\"\n0:\"ability\"\n0:\"**\"\n0:\":\"\n0:\" Ti\"\n0:\"DB\"\n0:\" allows\"\n0:\" for\"\n0:\" easy\"\n0:\" horizontal\"\n0:\" scaling\"\n0:\" of\"\n0:\" both\"\n0:\" computing\"\n0:\" and\"\n0:\" storage\"\n0:\" resources\"\n0:\".\"\n0:\" The\"\n0:\" architecture\"\n0:\" separates\"\n0:\" computing\"\n0:\" from\"\n0:\" storage\"\n0:\",\"\n0:\" enabling\"\n0:\" independent\"\n0:\" scaling\"\n0:\"[^\"\n0:\"6\"\n0:\"].\"\n0:\" This\"\n0:\" design\"\n0:\" ensures\"\n0:\" flexibility\"\n0:\" and\"\n0:\" adaptability\"\n0:\" to\"\n0:\" changing\"\n0:\" workloads\"\n0:\"[^\"\n0:\"7\"\n0:\"].\\n\\n\"\n0:\"2\"\n0:\".\"\n0:\" **\"\n0:\"High\"\n0:\" Availability\"\n0:\"**\"\n0:\":\"\n0:\" Ti\"\n0:\"DB\"\n0:\" guarantees\"\n0:\" strong\"\n0:\" consistency\"\n0:\" and\"\n0:\" availability\"\n0:\" through\"\n0:\" its\"\n0:\" multi\"\n0:\"-re\"\n0:\"p\"\n0:\"lica\"\n0:\" architecture\"\n0:\" and\"\n0:\" Multi\"\n0:\"-R\"\n0:\"aft\"\n0:\" protocol\"\n0:\".\"\n0:\" Data\"\n0:\" is\"\n0:\" stored\"\n0:\" in\"\n0:\" multiple\"\n0:\" replicas\"\n0:\",\"\n0:\" and\"\n0:\" a\"\n0:\" transaction\"\n0:\" can\"\n0:\" only\"\n0:\" be\"\n0:\" committed\"\n0:\" when\"\n0:\" data\"\n0:\" has\"\n0:\" been\"\n0:\" successfully\"\n0:\" written\"\n0:\" into\"\n0:\" the\"\n0:\" majority\"\n0:\" of\"\n0:\" replicas\"\n0:\"[^\"\n0:\"8\"\n0:\"].\"\n0:\" This\"\n0:\" ensures\"\n0:\" data\"\n0:\" integrity\"\n0:\" even\"\n0:\" in\"\n0:\" the\"\n0:\" event\"\n0:\" of\"\n0:\" failures\"\n0:\"[^\"\n0:\"9\"\n0:\"].\\n\\n\"\n0:\"3\"\n0:\".\"\n0:\" **\"\n0:\"HT\"\n0:\"AP\"\n0:\" Cap\"\n0:\"abilities\"\n0:\"**\"\n0:\":\"\n0:\" Ti\"\n0:\"DB\"\n0:\" supports\"\n0:\" both\"\n0:\" row\"\n0:\"-based\"\n0:\" (\"\n0:\"Ti\"\n0:\"KV\"\n0:\")\"\n0:\" and\"\n0:\" column\"\n0:\"ar\"\n0:\" (\"\n0:\"Ti\"\n0:\"Flash\"\n0:\")\"\n0:\" storage\"\n0:\" engines\"\n0:\",\"\n0:\" enabling\"\n0:\" real\"\n0:\"-time\"\n0:\" processing\"\n0:\" of\"\n0:\" 
both\"\n0:\" transactional\"\n0:\" and\"\n0:\" analytical\"\n0:\" workloads\"\n0:\".\"\n0:\" Ti\"\n0:\"Flash\"\n0:\" uses\"\n0:\" the\"\n0:\" Multi\"\n0:\"-R\"\n0:\"aft\"\n0:\" Lear\"\n0:\"ner\"\n0:\" protocol\"\n0:\" to\"\n0:\" replicate\"\n0:\" data\"\n0:\" from\"\n0:\" Ti\"\n0:\"KV\"\n0:\" in\"\n0:\" real\"\n0:\" time\"\n0:\",\"\n0:\" ensuring\"\n0:\" consistent\"\n0:\" data\"\n0:\" across\"\n0:\" both\"\n0:\" engines\"\n0:\"[^\"\n0:\"10\"\n0:\"].\\n\\n\"\n0:\"4\"\n0:\".\"\n0:\" **\"\n0:\"Cloud\"\n0:\"-N\"\n0:\"ative\"\n0:\" Design\"\n0:\"**\"\n0:\":\"\n0:\" Ti\"\n0:\"DB\"\n0:\" is\"\n0:\" built\"\n0:\" for\"\n0:\" cloud\"\n0:\" environments\"\n0:\",\"\n0:\" offering\"\n0:\" flexible\"\n0:\" scalability\"\n0:\",\"\n0:\" reliability\"\n0:\",\"\n0:\" and\"\n0:\" security\"\n0:\" on\"\n0:\" various\"\n0:\" cloud\"\n0:\" platforms\"\n0:\".\"\n0:\" It\"\n0:\" integrates\"\n0:\" seamlessly\"\n0:\" with\"\n0:\" Kubernetes\"\n0:\" and\"\n0:\" offers\"\n0:\" a\"\n0:\" fully\"\n0:\" managed\"\n0:\" service\"\n0:\",\"\n0:\" Ti\"\n0:\"DB\"\n0:\" Cloud\"\n0:\"[^\"\n0:\"11\"\n0:\"].\\n\\n\"\n0:\"5\"\n0:\".\"\n0:\" **\"\n0:\"My\"\n0:\"SQL\"\n0:\" Compatibility\"\n0:\"**\"\n0:\":\"\n0:\" Ti\"\n0:\"DB\"\n0:\" is\"\n0:\" compatible\"\n0:\" with\"\n0:\" the\"\n0:\" My\"\n0:\"SQL\"\n0:\" \"\n0:\"5\"\n0:\".\"\n0:\"7\"\n0:\" protocol\"\n0:\" and\"\n0:\" ecosystem\"\n0:\",\"\n0:\" allowing\"\n0:\" for\"\n0:\" easy\"\n0:\" migration\"\n0:\" of\"\n0:\" applications\"\n0:\" with\"\n0:\" minimal\"\n0:\" code\"\n0:\" changes\"\n0:\"[^\"\n0:\"12\"\n0:\"].\"\n0:\" However\"\n0:\",\"\n0:\" it\"\n0:\" does\"\n0:\" not\"\n0:\" support\"\n0:\" certain\"\n0:\" My\"\n0:\"SQL\"\n0:\" features\"\n0:\" like\"\n0:\" triggers\"\n0:\",\"\n0:\" stored\"\n0:\" procedures\"\n0:\",\"\n0:\" user\"\n0:\"-defined\"\n0:\" functions\"\n0:\",\"\n0:\" and\"\n0:\" foreign\"\n0:\" keys\"\n0:\"[^\"\n0:\"13\"\n0:\"].\\n\\n\"\n0:\"####\"\n0:\" Distributed\"\n0:\" Transactions\"\n0:\"\\n\\n\"\n0:\"Ti\"\n0:\"DB\"\n0:\" 
supports\"\n0:\" distributed\"\n0:\" transactions\"\n0:\" across\"\n0:\" the\"\n0:\" cluster\"\n0:\" using\"\n0:\" a\"\n0:\" two\"\n0:\"-phase\"\n0:\" commit\"\n0:\" protocol\"\n0:\" with\"\n0:\" optim\"\n0:\"izations\"\n0:\" inspired\"\n0:\" by\"\n0:\" Google's\"\n0:\" Per\"\n0:\"col\"\n0:\"ator\"\n0:\".\"\n0:\" This\"\n0:\" model\"\n0:\" relies\"\n0:\" on\"\n0:\" a\"\n0:\" timestamp\"\n0:\" allocator\"\n0:\" to\"\n0:\" assign\"\n0:\" monot\"\n0:\"one\"\n0:\" increasing\"\n0:\" timestamps\"\n0:\" for\"\n0:\" each\"\n0:\" transaction\"\n0:\",\"\n0:\" ensuring\"\n0:\" conflict\"\n0:\" detection\"\n0:\"[^\"\n0:\"14\"\n0:\"].\"\n0:\" The\"\n0:\" PD\"\n0:\" server\"\n0:\" acts\"\n0:\" as\"\n0:\" the\"\n0:\" timestamp\"\n0:\" allocator\"\n0:\" in\"\n0:\" a\"\n0:\" Ti\"\n0:\"DB\"\n0:\" cluster\"\n0:\"[^\"\n0:\"15\"\n0:\"].\\n\\n\"\n0:\"####\"\n0:\" Ease\"\n0:\" of\"\n0:\" Use\"\n0:\"\\n\\n\"\n0:\"Ti\"\n0:\"DB\"\n0:\" is\"\n0:\" designed\"\n0:\" to\"\n0:\" be\"\n0:\" user\"\n0:\"-friendly\"\n0:\",\"\n0:\" allowing\"\n0:\" users\"\n0:\" to\"\n0:\" manage\"\n0:\" it\"\n0:\" with\"\n0:\" familiar\"\n0:\" My\"\n0:\"SQL\"\n0:\" tools\"\n0:\" and\"\n0:\" replace\"\n0:\" My\"\n0:\"SQL\"\n0:\" in\"\n0:\" applications\"\n0:\" without\"\n0:\" significant\"\n0:\" code\"\n0:\" changes\"\n0:\"[^\"\n0:\"16\"\n0:\"].\"\n0:\" It\"\n0:\" supports\"\n0:\" various\"\n0:\" programming\"\n0:\" languages\"\n0:\" through\"\n0:\" My\"\n0:\"SQL\"\n0:\" clients\"\n0:\" or\"\n0:\" drivers\"\n0:\"[^\"\n0:\"17\"\n0:\"].\\n\\n\"\n0:\"####\"\n0:\" Data\"\n0:\" Migration\"\n0:\" Tools\"\n0:\"\\n\\n\"\n0:\"Ti\"\n0:\"DB\"\n0:\" provides\"\n0:\" a\"\n0:\" suite\"\n0:\" of\"\n0:\" tools\"\n0:\" for\"\n0:\" migrating\"\n0:\",\"\n0:\" replic\"\n0:\"ating\"\n0:\",\"\n0:\" and\"\n0:\" backing\"\n0:\" up\"\n0:\" data\"\n0:\".\"\n0:\" These\"\n0:\" tools\"\n0:\" facilitate\"\n0:\" the\"\n0:\" transition\"\n0:\" from\"\n0:\" My\"\n0:\"SQL\"\n0:\" to\"\n0:\" Ti\"\n0:\"DB\"\n0:\",\"\n0:\" making\"\n0:\" the\"\n0:\" 
migration\"\n0:\" process\"\n0:\" straightforward\"\n0:\"[^\"\n0:\"18\"\n0:\"].\\n\\n\"\n0:\"###\"\n0:\" Conclusion\"\n0:\"\\n\\n\"\n0:\"Ti\"\n0:\"DB\"\n0:\" is\"\n0:\" a\"\n0:\" robust\"\n0:\",\"\n0:\" scalable\"\n0:\",\"\n0:\" and\"\n0:\" highly\"\n0:\" available\"\n0:\" distributed\"\n0:\" SQL\"\n0:\" database\"\n0:\" that\"\n0:\" offers\"\n0:\" comprehensive\"\n0:\" support\"\n0:\" for\"\n0:\" HT\"\n0:\"AP\"\n0:\" workloads\"\n0:\".\"\n0:\" Its\"\n0:\" compatibility\"\n0:\" with\"\n0:\" My\"\n0:\"SQL\"\n0:\",\"\n0:\" combined\"\n0:\" with\"\n0:\" its\"\n0:\" advanced\"\n0:\" features\"\n0:\" and\"\n0:\" cloud\"\n0:\"-native\"\n0:\" design\"\n0:\",\"\n0:\" makes\"\n0:\" it\"\n0:\" a\"\n0:\" versatile\"\n0:\" choice\"\n0:\" for\"\n0:\" various\"\n0:\" data\"\n0:\"-intensive\"\n0:\" applications\"\n0:\".\\n\\n\"\n0:\"[^\"\n0:\"1\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Overview\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/st\"\n0:\"able\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"2\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Introduction\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"4\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"3\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Introduction\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"4\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Architecture\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" 
Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-\"\n0:\"architecture\"\n0:\")\\n\"\n0:\"[^\"\n0:\"5\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Architecture\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"3\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-\"\n0:\"architecture\"\n0:\")\\n\"\n0:\"[^\"\n0:\"6\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Key\"\n0:\" Features\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"7\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Key\"\n0:\" Features\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"4\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"8\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" High\"\n0:\" Availability\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"9\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" High\"\n0:\" Availability\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"4\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"10\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" HT\"\n0:\"AP\"\n0:\" 
Cap\"\n0:\"abilities\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"11\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Cloud\"\n0:\"-N\"\n0:\"ative\"\n0:\" Design\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"4\"\n0:\"/\"\n0:\"overview\"\n0:\")\\n\"\n0:\"[^\"\n0:\"12\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" My\"\n0:\"SQL\"\n0:\" Compatibility\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"13\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" My\"\n0:\"SQL\"\n0:\" Compatibility\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"6\"\n0:\".\"\n0:\"5\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"14\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Distributed\"\n0:\" Transactions\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"15\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Distributed\"\n0:\" Transactions\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" 
Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"6\"\n0:\".\"\n0:\"5\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"16\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Ease\"\n0:\" of\"\n0:\" Use\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"17\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Programming\"\n0:\" Language\"\n0:\" Support\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"7\"\n0:\".\"\n0:\"1\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-f\"\n0:\"aq\"\n0:\")\\n\"\n0:\"[^\"\n0:\"18\"\n0:\"]:\"\n0:\" [\"\n0:\"Ti\"\n0:\"DB\"\n0:\" Data\"\n0:\" Migration\"\n0:\" Tools\"\n0:\" |\"\n0:\" Ping\"\n0:\"CAP\"\n0:\" Docs\"\n0:\"](\"\n0:\"https\"\n0:\"://\"\n0:\"docs\"\n0:\".p\"\n0:\"ing\"\n0:\"cap\"\n0:\".com\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"/v\"\n0:\"5\"\n0:\".\"\n0:\"3\"\n0:\"/t\"\n0:\"id\"\n0:\"b\"\n0:\"-\"\n0:\"architecture\"\n0:\")\"\n0:\"\"\n8:[{\"state\":\"FINISHED\",\"display\":\"\",\"context\":\"\"}]\n2:[{\"chat\":{\"origin\": null,\"\"updated_at\":\"2024-07-09T09:55:30\",\"id\":\"00000000-0000-0000-0000-00000000000\",\"title\":\"what is tidb?\",\"engine_id\":1,\"engine_options\":\"{\\\"llm\\\":{\\\"provider\\\":\\\"openai\\\",\\\"openai_chat_model\\\":\\\"gpt-4o\\\",\\\"gemini_chat_model\\\":\\\"models/gemini-1.5-flash\\\",\\\"reranker_provider\\\":\\\"jinaai\\\",\\\"reranker_top_k\\\":10,\\\"intent_graph_knowledge\\\":\\\"Given a list of relationships of a knowledge graph as follows. 
When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\nKnowledge sub-queries:\\\\n\\\\n{% for sub_query, data in sub_queries.items() %}\\\\n\\\\nSub-query: {{ sub_query }}\\\\n\\\\n  - Entities:\\\\n\\\\n{% for entity in data['entities'] %}\\\\n\\\\n    - Name: {{ entity.name }}\\\\n    - Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n  - Relationships:\\\\n\\\\n{% for relationship in data['relationships'] %}\\\\n\\\\n    - Description: {{ relationship.rag_description }}\\\\n    - Last Modified At: {{ relationship.last_modified_at }}\\\\n    - Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\\\\n{% endfor %}\\\\n\\\",\\\"normal_graph_knowledge\\\":\\\"Given a list of relationships of a knowledge graph as follows. When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\n---------------------\\\\nEntities:\\\\n\\\\n{% for entity in entities %}\\\\n\\\\n- Name: {{ entity.name }}\\\\n- Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n---------------------\\\\n\\\\nKnowledge relationships:\\\\n\\\\n{% for relationship in relationships %}\\\\n\\\\n- Description: {{ relationship.rag_description }}\\\\n- Weight: {{ relationship.weight }}\\\\n- Last Modified At: {{ relationship.last_modified_at }}\\\\n- Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\\\"},\\\"knowledge_graph\\\":{\\\"enabled\\\":true,\\\"depth\\\":2,\\\"include_meta\\\":true,\\\"with_degree\\\":false,\\\"using_intent_search\\\":true}}\",\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"created_at\":\"2024-07-09T09:55:30\",\"deleted_at\":null},\"user_message\":{\"post_verification_result_url\": 
null,\"id\":120007,\"created_at\":\"2024-07-09T09:55:31\",\"role\":\"user\",\"trace_url\":null,\"finished_at\":null,\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"updated_at\":\"2024-07-09T09:55:31\",\"ordinal\":1,\"content\":\"what is tidb?\",\"error\":null,\"sources\":[],\"chat_id\":\"00000000-0000-0000-0000-00000000000\"},\"assistant_message\":{\"post_verification_result_url\": null,\"id\":120008,\"created_at\":\"2024-07-09T09:55:31\",\"role\":\"assistant\",\"trace_url\":\"https://us.cloud.langfuse.com/trace/ccf8aea4-086b-4d72-a7a1-a05ef2e6830a\",\"finished_at\":\"2024-07-09T09:58:07.692084Z\",\"user_id\":\"01907db8-8850-795d-855b-552663c18c9f\",\"ordinal\":2,\"content\":\"### Comprehensive Overview of TiDB\\n\\n#### What is TiDB?\\n\\nTiDB is an open-source distributed SQL database designed to support Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL-compatible and features horizontal scalability, strong consistency, and high availability[^1][^2]. TiDB aims to provide a one-stop database solution that covers OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services[^3].\\n\\n#### Architecture\\n\\nThe TiDB cluster consists of three main components[^4]:\\n\\n1. **TiDB Server**: Handles SQL parsing, query planning, and execution.\\n2. **TiKV Server**: Acts as the distributed key-value storage engine, storing the actual data.\\n3. **PD (Placement Driver) Server**: Manages cluster metadata, allocates timestamps, and handles data placement and load balancing.\\n\\nAdditionally, TiDB includes other components such as TiFlash for columnar storage, TiSpark for Spark integration, TiDB Binlog for data replication, and TiDB Lightning for high-performance data import[^5].\\n\\n#### Key Features\\n\\n1. **Scalability**: TiDB allows for easy horizontal scaling of both computing and storage resources. The architecture separates computing from storage, enabling independent scaling[^6]. 
This design ensures flexibility and adaptability to changing workloads[^7].\\n\\n2. **High Availability**: TiDB guarantees strong consistency and availability through its multi-replica architecture and Multi-Raft protocol. Data is stored in multiple replicas, and a transaction can only be committed when data has been successfully written into the majority of replicas[^8]. This ensures data integrity even in the event of failures[^9].\\n\\n3. **HTAP Capabilities**: TiDB supports both row-based (TiKV) and columnar (TiFlash) storage engines, enabling real-time processing of both transactional and analytical workloads. TiFlash uses the Multi-Raft Learner protocol to replicate data from TiKV in real time, ensuring consistent data across both engines[^10].\\n\\n4. **Cloud-Native Design**: TiDB is built for cloud environments, offering flexible scalability, reliability, and security on various cloud platforms. It integrates seamlessly with Kubernetes and offers a fully managed service, TiDB Cloud[^11].\\n\\n5. **MySQL Compatibility**: TiDB is compatible with the MySQL 5.7 protocol and ecosystem, allowing for easy migration of applications with minimal code changes[^12]. However, it does not support certain MySQL features like triggers, stored procedures, user-defined functions, and foreign keys[^13].\\n\\n#### Distributed Transactions\\n\\nTiDB supports distributed transactions across the cluster using a two-phase commit protocol with optimizations inspired by Google's Percolator. This model relies on a timestamp allocator to assign monotone increasing timestamps for each transaction, ensuring conflict detection[^14]. The PD server acts as the timestamp allocator in a TiDB cluster[^15].\\n\\n#### Ease of Use\\n\\nTiDB is designed to be user-friendly, allowing users to manage it with familiar MySQL tools and replace MySQL in applications without significant code changes[^16]. 
It supports various programming languages through MySQL clients or drivers[^17].\\n\\n#### Data Migration Tools\\n\\nTiDB provides a suite of tools for migrating, replicating, and backing up data. These tools facilitate the transition from MySQL to TiDB, making the migration process straightforward[^18].\\n\\n### Conclusion\\n\\nTiDB is a robust, scalable, and highly available distributed SQL database that offers comprehensive support for HTAP workloads. Its compatibility with MySQL, combined with its advanced features and cloud-native design, makes it a versatile choice for various data-intensive applications.\\n\\n[^1]: [TiDB Overview | PingCAP Docs](https://docs.pingcap.com/tidb/stable/overview)\\n[^2]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^3]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^4]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-architecture)\\n[^5]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v5.3/tidb-architecture)\\n[^6]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^7]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^8]: [TiDB High Availability | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^9]: [TiDB High Availability | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^10]: [TiDB HTAP Capabilities | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^11]: [TiDB Cloud-Native Design | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^12]: [TiDB MySQL Compatibility | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^13]: [TiDB MySQL Compatibility | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/tidb-faq)\\n[^14]: [TiDB Distributed Transactions | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^15]: [TiDB Distributed Transactions | PingCAP 
Docs](https://docs.pingcap.com/tidb/v6.5/tidb-faq)\\n[^16]: [TiDB Ease of Use | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^17]: [TiDB Programming Language Support | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^18]: [TiDB Data Migration Tools | PingCAP Docs](https://docs.pingcap.com/tidb/v5.3/tidb-architecture)\",\"error\":null,\"sources\":[{\"id\":8600,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.5/tidb-faq\"},{\"id\":9057,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidbcloud/tidb-architecture\"},{\"id\":9373,\"name\":\"TiDB Introduction\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/overview\"},{\"id\":9865,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/tidb-architecture\"},{\"id\":10191,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v7.1/tidb-faq\"},{\"id\":11370,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v6.5/tidb-faq\"},{\"id\":12985,\"name\":\"TiDB Introduction\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.4/overview\"},{\"id\":13942,\"name\":\"Overview\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.3/tidb-architecture\"},{\"id\":14218,\"name\":\"TiDB FAQs\",\"source_uri\":\"https://docs.pingcap.com/tidb/v5.3/tidb-faq\"}],\"chat_id\":\"00000000-0000-0000-0000-00000000000\"}}]\n"
  },
  {
    "path": "frontend/app/src/api/.gitignore",
    "content": "*.http\nhttp-client.private.env.json\n"
  },
  {
    "path": "frontend/app/src/api/api-keys.ts",
    "content": "import { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType, type ZodTypeDef } from 'zod';\n\nexport interface ApiKey {\n  created_at: Date;\n  updated_at: Date;\n  description: string;\n  api_key_display: string;\n  is_active: boolean;\n  user_id: string;\n  id: number;\n}\n\nexport interface CreateApiKey {\n  description: string;\n}\n\nexport interface CreateApiKeyResponse {\n  api_key: string;\n}\n\nconst apiKeySchema = z.object({\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  description: z.string(),\n  api_key_display: z.string(),\n  is_active: z.boolean(),\n  user_id: z.string(),\n  id: z.number(),\n}) satisfies ZodType<ApiKey, ZodTypeDef, any>;\n\nconst createApiKeyResponseSchema = z.object({\n  api_key: z.string(),\n}) satisfies ZodType<CreateApiKeyResponse>;\n\nexport async function listApiKeys ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<ApiKey>> {\n  return await fetch(requestUrl('/api/v1/api-keys', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(apiKeySchema)));\n}\n\nexport async function createApiKey (create: CreateApiKey): Promise<CreateApiKeyResponse> {\n  return await fetch(requestUrl('/api/v1/api-keys'), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(create),\n  }).then(handleResponse(createApiKeyResponseSchema));\n}\n\nexport async function deleteApiKey (id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/api-keys/${id}`), {\n    method: 'DELETE',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  }).then(handleErrors);\n}\n\n"
  },
  {
    "path": "frontend/app/src/api/auth.ts",
    "content": "import { authenticationHeaders, buildUrlParams, handleErrors, requestUrl } from '@/lib/request';\n\nexport interface LoginParams {\n  username: string;\n  password: string;\n}\n\nexport async function login (params: LoginParams) {\n  const usp = buildUrlParams(params);\n\n  await fetch(requestUrl('/api/v1/auth/login'), {\n    method: 'POST',\n    body: usp,\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/x-www-form-urlencoded',\n    },\n  }).then(handleErrors);\n}\n\nexport async function logout () {\n  await fetch(requestUrl('/api/v1/auth/logout'), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n    method: 'POST',\n  }).then(handleErrors);\n}\n"
  },
  {
    "path": "frontend/app/src/api/chat-engines.ts",
    "content": "import { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport interface ChatEngine {\n  id: number;\n  name: string;\n  updated_at: Date;\n  created_at: Date;\n  deleted_at: Date | null;\n  engine_options: ChatEngineOptions;\n  llm_id: number | null;\n  fast_llm_id: number | null;\n  reranker_id: number | null;\n  is_default: boolean;\n  is_public: boolean;\n}\n\nexport interface CreateChatEngineParams {\n  name: string;\n  engine_options: ChatEngineOptions;\n  llm_id?: number | null;\n  fast_llm_id?: number | null;\n  reranker_id?: number | null;\n}\n\nexport interface ChatEngineOptions {\n  external_engine_config?: {\n    stream_chat_api_url?: string | null\n  } | null;\n  clarify_question?: boolean | null;\n  further_questions?: boolean | null;\n  knowledge_base?: ChatEngineKnowledgeBaseOptions | null;\n  knowledge_graph?: ChatEngineKnowledgeGraphOptions | null;\n  llm?: ChatEngineLLMOptions | null;\n  post_verification_url?: string | null;\n  post_verification_token?: string | null;\n  hide_sources?: boolean | null;\n}\n\nexport interface ChatEngineKnowledgeBaseOptions {\n  /**\n   * @deprecated\n   */\n  linked_knowledge_base?: LinkedKnowledgeBaseOptions | null;\n  linked_knowledge_bases?: { id: number }[] | null;\n}\n\nexport interface ChatEngineKnowledgeGraphOptions {\n  depth?: number | null;\n  enabled?: boolean | null;\n  include_meta?: boolean | null;\n  with_degree?: boolean | null;\n  using_intent_search?: boolean | null;\n}\n\nexport type ChatEngineLLMOptions = {\n  condense_question_prompt?: string | null\n  text_qa_prompt?: string | null\n  intent_graph_knowledge?: string | null\n  normal_graph_knowledge?: string | null\n  clarifying_question_prompt?: string | null\n  generate_goal_prompt?: string | null\n  further_questions_prompt?: string | null\n}\n\n/**\n * 
@deprecated\n */\nexport interface LinkedKnowledgeBaseOptions {\n  id?: number | null;\n}\n\nconst kbOptionsSchema = z.object({\n  linked_knowledge_base: z.object({ id: z.number().nullable().optional() }).nullable().optional(),\n  linked_knowledge_bases: z.object({ id: z.number() }).array().nullable().optional(),\n}).passthrough();\n\nconst kgOptionsSchema = z.object({\n  depth: z.number().nullable().optional(),\n  enabled: z.boolean().nullable().optional(),\n  include_meta: z.boolean().nullable().optional(),\n  with_degree: z.boolean().nullable().optional(),\n  using_intent_search: z.boolean().nullable().optional(),\n}).passthrough() satisfies ZodType<ChatEngineKnowledgeGraphOptions>;\n\nconst llmOptionsSchema =\n  z.object({\n    condense_question_prompt: z.string().nullable().optional(),\n    text_qa_prompt: z.string().nullable().optional(),\n    intent_graph_knowledge: z.string().nullable().optional(),\n    normal_graph_knowledge: z.string().nullable().optional(),\n    clarifying_question_prompt: z.string().nullable().optional(),\n    generate_goal_prompt: z.string().nullable().optional(),\n    further_questions_prompt: z.string().nullable().optional(),\n    // provider: z.string(),\n    // reranker_provider: z.string(),\n    // reranker_top_k: z.number(),\n  }).passthrough() as ZodType<ChatEngineLLMOptions, any, any>;\n\nconst chatEngineOptionsSchema = z.object({\n  external_engine_config: z.object({\n    stream_chat_api_url: z.string().optional().nullable(),\n  }).nullable().optional(),\n  clarify_question: z.boolean().nullable().optional(),\n  further_questions: z.boolean().nullable().optional(),\n  knowledge_base: kbOptionsSchema.nullable().optional(),\n  knowledge_graph: kgOptionsSchema.nullable().optional(),\n  llm: llmOptionsSchema.nullable().optional(),\n  post_verification_url: z.string().nullable().optional(),\n  post_verification_token: z.string().nullable().optional(),\n  hide_sources: z.boolean().nullable().optional(),\n}).passthrough()\n  
.refine(option => {\n    if (!option.knowledge_base?.linked_knowledge_bases?.length) {\n      if (option.knowledge_base?.linked_knowledge_base?.id != null) {\n        // Frontend temporary migration. Should be removed after backend removed linked_knowledge_base field.\n        option.knowledge_base.linked_knowledge_bases = [{\n          id: option.knowledge_base.linked_knowledge_base.id,\n        }];\n        delete option.knowledge_base.linked_knowledge_base;\n      }\n    }\n    return option;\n  }) satisfies ZodType<ChatEngineOptions, any, any>;\n\nconst chatEngineSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  updated_at: zodJsonDate(),\n  created_at: zodJsonDate(),\n  deleted_at: zodJsonDate().nullable(),\n  engine_options: chatEngineOptionsSchema,\n  llm_id: z.number().nullable(),\n  fast_llm_id: z.number().nullable(),\n  reranker_id: z.number().nullable(),\n  is_default: z.boolean(),\n  is_public: z.boolean(),\n}) satisfies ZodType<ChatEngine, any, any>;\n\nexport async function getDefaultChatEngineOptions (): Promise<ChatEngineOptions> {\n  return await fetch(requestUrl('/api/v1/admin/chat-engines-default-config'), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(chatEngineOptionsSchema));\n}\n\nexport async function listChatEngines ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<ChatEngine>> {\n  return await fetch(requestUrl('/api/v1/admin/chat-engines', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(chatEngineSchema)));\n}\n\nexport async function getChatEngine (id: number): Promise<ChatEngine> {\n  return await fetch(requestUrl(`/api/v1/admin/chat-engines/${id}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(chatEngineSchema));\n}\n\nexport async function updateChatEngine (id: number, partial: Partial<Pick<ChatEngine, 'name' | 'llm_id' | 'fast_llm_id' | 'reranker_id' | 'engine_options' | 'is_default'>>): 
Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/chat-engines/${id}`), {\n    method: 'PUT',\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n    body: JSON.stringify(partial),\n  })\n    .then(handleErrors);\n}\n\nexport async function createChatEngine (create: CreateChatEngineParams) {\n  return await fetch(requestUrl(`/api/v1/admin/chat-engines`), {\n    method: 'POST',\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n    body: JSON.stringify(create),\n  })\n    .then(handleResponse(chatEngineSchema));\n}\n\nexport async function deleteChatEngine (id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/chat-engines/${id}`), {\n    method: 'DELETE',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleErrors);\n}\n\nexport async function listPublicChatEngines ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<ChatEngine>> {\n  return await fetch(requestUrl('/api/v1/chat-engines', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(chatEngineSchema)));\n}\n\nexport async function getPublicChatEngine (id: number): Promise<ChatEngine> {\n  return await fetch(requestUrl(`/api/v1/chat-engines/${id}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(chatEngineSchema));\n}\n"
  },
  {
    "path": "frontend/app/src/api/chats.ts",
    "content": "import type { ChatEngineOptions } from '@/api/chat-engines';\nimport { type KnowledgeGraph, KnowledgeGraphEntityType, knowledgeGraphSchema } from '@/api/graph';\nimport { bufferedReadableStreamTransformer } from '@/lib/buffered-readable-stream';\nimport { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { parseStreamPart } from 'ai';\nimport { z, type ZodType } from 'zod';\n\ntype ClientEngineOptions = Omit<ChatEngineOptions, 'post_verification_token'>;\n\nexport interface Chat {\n  title: string;\n  engine_id: number;\n  engine_options: ClientEngineOptions;\n  deleted_at: Date | null;\n  user_id: string | null;\n  browser_id: string | null;\n  updated_at: Date;\n  created_at: Date;\n  id: string;\n  origin: string | null;\n}\n\nexport interface ChatDetail {\n  chat: Chat;\n  messages: ChatMessage[];\n}\n\nexport const enum ChatMessageRole {\n  assistant = 'assistant',\n  user = 'user'\n}\n\nexport interface ChatMessage {\n  id: number;\n  role: ChatMessageRole;\n  error: string | null;\n  trace_url: string | null;\n  finished_at: Date | null;\n  user_id: string | null;\n  created_at: Date;\n  updated_at: Date;\n  ordinal: number;\n  content: string;\n  sources: ChatMessageSource[];\n  chat_id: string;\n  post_verification_result_url: string | null;\n}\n\nexport interface ChatMessageSource {\n  id: number;\n  name: string;\n  source_uri: string;\n}\n\nexport const chatSchema = z.object({\n  title: z.string(),\n  engine_id: z.number(),\n  engine_options: z.object({}).passthrough().transform(value => value as never as ChatEngineOptions),\n  deleted_at: zodJsonDate().nullable(),\n  user_id: z.string().nullable(),\n  browser_id: z.string().nullable(),\n  updated_at: zodJsonDate(),\n  created_at: zodJsonDate(),\n  id: z.string(),\n  origin: z.string().nullable(),\n}) satisfies ZodType<Chat, any, any>;\n\nconst 
chatMessageSourceSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  source_uri: z.string(),\n});\n\nexport const chatMessageSchema = z.object({\n  id: z.number(),\n  role: z.enum([ChatMessageRole.user, ChatMessageRole.assistant]),\n  error: z.string().nullable(),\n  trace_url: z.string().nullable(),\n  finished_at: zodJsonDate().nullable(),\n  user_id: z.string().nullable(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  ordinal: z.number(),\n  content: z.string(),\n  sources: chatMessageSourceSchema.array(),\n  chat_id: z.string(),\n  post_verification_result_url: z.string().url().nullable(),\n}) satisfies ZodType<ChatMessage, any, any>;\n\nconst chatDetailSchema = z.object({\n  chat: chatSchema,\n  messages: chatMessageSchema.array(),\n});\n\nexport interface FeedbackParams {\n  feedback_type: 'like' | 'dislike';\n  comment: string;\n}\n\nexport interface PostChatParams {\n  chat_id?: string;\n  chat_engine?: string;\n  content: string;\n\n  headers?: HeadersInit;\n  signal?: AbortSignal;\n}\n\nexport async function listChats ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<Chat>> {\n  return await fetch(requestUrl('/api/v1/chats', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(chatSchema)));\n}\n\nexport async function getChat (id: string): Promise<ChatDetail> {\n  return await fetch(requestUrl(`/api/v1/chats/${id}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(chatDetailSchema));\n}\n\nexport async function deleteChat (id: string): Promise<void> {\n  await fetch(requestUrl(`/api/v1/chats/${id}`), {\n    method: 'delete',\n    headers: await authenticationHeaders(),\n  }).then(handleErrors);\n}\n\nexport async function postFeedback (chatMessageId: number, feedback: FeedbackParams) {\n  return await fetch(requestUrl(`/api/v1/chat-messages/${chatMessageId}/feedback`), {\n    method: 'post',\n    headers: {\n      ...await 
authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    credentials: 'include',\n    body: JSON.stringify(feedback),\n  }).then(handleErrors);\n}\n\nexport async function getChatMessageSubgraph (chatMessageId: number): Promise<KnowledgeGraph> {\n  return await fetch(requestUrl(`/api/v1/chat-messages/${chatMessageId}/subgraph`), {\n    headers: await authenticationHeaders(),\n    credentials: 'include',\n  })\n    .then(handleResponse(knowledgeGraphSchema));\n}\n\nexport async function getChatMessageRecommendedQuestions (chatMessageId: number) {\n  return await fetch(requestUrl(`/api/v1/chat-messages/${chatMessageId}/recommended-questions`), {\n    headers: await authenticationHeaders(),\n    credentials: 'include',\n  })\n    .then(handleResponse(z.string().array()));\n}\n\nexport async function reloadChatMessageRecommendedQuestions (chatMessageId: number) {\n  return await fetch(requestUrl(`/api/v1/chat-messages/${chatMessageId}/recommended-questions`), {\n    method: 'POST',\n    headers: await authenticationHeaders(),\n    credentials: 'include',\n  })\n    .then(handleResponse(z.string().array()));\n}\n\nexport async function* chat ({ chat_id, chat_engine, content, headers: headersInit, signal }: PostChatParams, onResponse?: (response: Response) => void) {\n  const headers = new Headers(headersInit);\n  headers.set('Content-Type', 'application/json');\n\n  for (let [key, value] of Object.entries(await authenticationHeaders())) {\n    headers.set(key, value);\n  }\n\n  const response = await fetch(requestUrl(`/api/v1/chats`), {\n    method: 'POST',\n    headers,\n    credentials: 'include',\n    body: JSON.stringify({\n      chat_id,\n      chat_engine,\n      stream: true,\n      messages: [{\n        'role': 'user',\n        content,\n      }],\n    }),\n    signal,\n  }).then(handleErrors);\n\n  onResponse?.(response);\n\n  if (!response.body) {\n    throw new Error(`${response.status} ${response.statusText} Empty response body`);\n  
}\n\n  const reader = response.body.pipeThrough(bufferedReadableStreamTransformer()).getReader();\n\n  while (true) {\n    const chunk = await reader.read();\n    if (chunk.done) {\n      break;\n    }\n\n    if (!!chunk.value.trim()) {\n      yield parseStreamPart(chunk.value);\n    }\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/api/commons.ts",
    "content": "import { authenticationHeaders, handleErrors, requestUrl } from '@/lib/request';\n\nexport async function setDefault (type: 'embedding-models' | 'llms' | 'reranker-models', id: number) {\n  await fetch(requestUrl(`/api/v1/admin/${type}/${id}/set_default`), {\n    method: 'PUT',\n    headers: await authenticationHeaders(),\n  }).then(handleErrors);\n}\n"
  },
  {
    "path": "frontend/app/src/api/datasources.ts",
    "content": "import { type IndexProgress, indexSchema, type IndexTotalStats, totalSchema } from '@/api/rag';\nimport { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport interface DatasourceBase {\n  id: number;\n  name: string;\n}\n\ntype DatasourceSpec = ({\n  data_source_type: 'file'\n  config: { file_id: number, file_name: string }[]\n} | {\n  data_source_type: 'web_sitemap'\n  config: { url: string }\n} | {\n  data_source_type: 'web_single_page'\n  config: { urls: string[] }\n})\n\nexport type Datasource = DatasourceBase & DatasourceSpec;\n\nexport type DataSourceIndexProgress = {\n  vector_index: IndexProgress\n  documents: IndexTotalStats\n  chunks: IndexTotalStats\n  kg_index?: IndexProgress\n  relationships?: IndexTotalStats\n}\n\nexport interface BaseCreateDatasourceParams {\n  name: string;\n}\n\nexport type CreateDatasourceSpecParams = ({\n  data_source_type: 'file'\n  config: { file_id: number, file_name: string }[]\n} | {\n  data_source_type: 'web_single_page'\n  config: { urls: string[] }\n} | {\n  data_source_type: 'web_sitemap'\n  config: { url: string }\n});\n\nexport type CreateDatasourceParams = BaseCreateDatasourceParams & CreateDatasourceSpecParams;\n\nexport interface Upload {\n  created_at?: Date;\n  updated_at?: Date;\n  id: number;\n  name: string;\n  size: number;\n  path: string;\n  mime_type: string;\n  user_id: string;\n}\n\nexport type DatasourceVectorIndexError = {\n  document_id: number\n  document_name: string\n  source_uri: string\n  error: string | null\n}\n\nexport type DatasourceKgIndexError = {\n  document_id: number\n  document_name: string\n  chunk_id: string\n  source_uri: string\n  error: string | null\n}\n\nconst datasourceSpecSchema = z.discriminatedUnion('data_source_type', [\n  z.object({\n    data_source_type: z.literal('file'),\n    config: 
z.array(z.object({ file_id: z.number(), file_name: z.string() })),\n  }),\n  z.object({\n    data_source_type: z.enum(['web_single_page']),\n    config: z.object({ urls: z.string().array() }).or(z.object({ url: z.string() })).transform(obj => {\n      if ('url' in obj) {\n        return { urls: [obj.url] };\n      } else {\n        return obj;\n      }\n    }),\n  }),\n  z.object({\n    data_source_type: z.enum(['web_sitemap']),\n    config: z.object({ url: z.string() }),\n  })],\n) satisfies ZodType<DatasourceSpec, any, any>;\n\nexport const datasourceSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n}).and(datasourceSpecSchema) satisfies ZodType<Datasource, any, any>;\n\nconst uploadSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  size: z.number(),\n  path: z.string(),\n  mime_type: z.string(),\n  user_id: z.string(),\n  created_at: zodJsonDate().optional(),\n  updated_at: zodJsonDate().optional(),\n}) satisfies ZodType<Upload, any, any>;\nz.object({\n  vector_index: indexSchema,\n  documents: totalSchema,\n  chunks: totalSchema,\n  kg_index: indexSchema.optional(),\n  relationships: totalSchema.optional(),\n}) satisfies ZodType<DataSourceIndexProgress>;\n\nexport async function listDataSources (kbId: number, { page = 1, size = 10 }: PageParams = {}): Promise<Page<Datasource>> {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/datasources`, { page, size }), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(zodPage(datasourceSchema)));\n}\n\nexport async function getDatasource (kbId: number, id: number): Promise<Datasource> {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/datasources/${id}`), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(datasourceSchema));\n}\n\nexport async function deleteDatasource (kbId: number, id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/datasources/${id}`), {\n    method: 'DELETE',\n 
   headers: await authenticationHeaders(),\n  }).then(handleErrors);\n}\n\nexport async function createDatasource (kbId: number, params: CreateDatasourceParams) {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/datasources`), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(datasourceSchema));\n}\n\nexport async function updateDatasource (kbId: number, id: number, params: { name: string }) {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/datasources/${id}`), {\n    method: 'PUT',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(datasourceSchema));\n}\n\nexport async function uploadFiles (files: File[]) {\n  const formData = new FormData();\n  files.forEach((file) => {\n    formData.append('files', file);\n  });\n\n  return fetch(requestUrl(`/api/v1/admin/uploads`), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n    body: formData,\n  }).then(handleResponse(uploadSchema.array()));\n}\n"
  },
  {
    "path": "frontend/app/src/api/documents.ts",
    "content": "import { indexStatuses } from '@/api/rag';\nimport { authenticationHeaders, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport const mimeTypes = [\n  { name: 'Text', value: 'text/plain' },\n  { name: 'Markdown', value: 'text/markdown' },\n  { name: 'Pdf', value: 'application/pdf' },\n  { name: 'Microsoft Word (docx)', value: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },\n  { name: 'Microsoft PowerPoint (pptx)', value: 'application/vnd.openxmlformats-officedocument.presentationml.presentation' },\n  { name: 'Microsoft Excel (xlsx)', value: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },\n] as const satisfies MimeType[];\n\nconst mimeValues: (typeof mimeTypes)[number]['value'] = mimeTypes.map(m => m.value) as never;\n\nexport interface Document {\n  id: number,\n  name: string,\n  created_at?: Date | undefined;\n  updated_at?: Date | undefined\n  last_modified_at: Date,\n  hash: string\n  content: string\n  meta: object,\n  mime_type: string,\n  source_uri: string,\n  index_status: string,\n  index_result?: unknown\n  data_source: {\n    id: number\n    name: string\n  }\n  knowledge_base: {\n    id: number\n    name: string\n  } | null\n}\n\nexport const documentSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  last_modified_at: zodJsonDate(),\n  hash: z.string(),\n  content: z.string(),\n  meta: z.object({}).passthrough(),\n  mime_type: z.string(),\n  source_uri: z.string(),\n  index_status: z.string(),\n  index_result: z.unknown(),\n  data_source: z.object({\n    id: z.number(),\n    name: z.string(),\n  }),\n  knowledge_base: z.object({\n    id: z.number(),\n    name: z.string(),\n  }).nullable(),\n}) satisfies ZodType<Document, any, any>;\n\nconst zDate = 
z.coerce.date().or(z.literal('').transform(() => undefined)).optional();\nconst zDateRange = z.tuple([zDate, zDate]).optional();\n\nexport const listDocumentsFiltersSchema = z.object({\n  search: z.string().optional(),\n  knowledge_base_id: z.number().optional(),\n  created_at: zDateRange,\n  updated_at: zDateRange,\n  last_modified_at: zDateRange,\n  mime_type: z.enum(mimeValues).optional(),\n  index_status: z.enum(indexStatuses).optional(),\n});\n\nexport type ListDocumentsTableFilters = z.infer<typeof listDocumentsFiltersSchema>;\n\nexport async function listDocuments ({ page = 1, size = 10, knowledge_base_id, search, ...filters }: PageParams & ListDocumentsTableFilters = {}): Promise<Page<Document>> {\n  const apiFilters = {\n    ...filters,\n    knowledge_base_id,\n    search: search\n  };\n  const api_url = knowledge_base_id != null ? `/api/v1/admin/knowledge_bases/${knowledge_base_id}/documents` : '/api/v1/admin/documents';\n  return await fetch(requestUrl(api_url, { page, size, ...apiFilters }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(documentSchema)));\n}\n\nexport interface MimeType {\n  name: string;\n  value: string;\n}\n\n"
  },
  {
    "path": "frontend/app/src/api/embedding-models.ts",
    "content": "import { type ProviderOption, providerOptionSchema } from '@/api/providers';\nimport { authenticationHeaders, handleNullableResponse, handleResponse, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType, type ZodTypeDef } from 'zod';\n\nexport interface EmbeddingModelSummary {\n  id: number;\n  name: string;\n  provider: string;\n  model: string;\n  vector_dimension: number,\n  is_default: boolean\n}\n\nexport interface EmbeddingModel extends EmbeddingModelSummary {\n  config?: any;\n  created_at?: Date | null;\n  updated_at?: Date | null;\n}\n\nexport interface EmbeddingModelOption extends ProviderOption {\n  default_embedding_model: string;\n  embedding_model_description: string;\n}\n\nexport interface CreateEmbeddingModel {\n  name: string;\n  provider: string;\n  model: string;\n  vector_dimension: number;\n  config?: any;\n  credentials: string | object;\n}\n\nexport interface UpdateEmbeddingModel {\n  name?: string;\n  config?: any;\n  credentials?: string | object;\n}\n\nexport const embeddingModelSummarySchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  provider: z.string(),\n  model: z.string(),\n  vector_dimension: z.number(),\n  is_default: z.boolean(),\n}) satisfies ZodType<EmbeddingModelSummary, ZodTypeDef, any>;\n\nconst embeddingModelSchema = embeddingModelSummarySchema.extend({\n  config: z.any(),\n  created_at: zodJsonDate().nullable().optional(),\n  updated_at: zodJsonDate().nullable().optional(),\n}) satisfies ZodType<EmbeddingModel, ZodTypeDef, any>;\n\nconst embeddingModelOptionSchema = providerOptionSchema.and(z.object({\n  default_embedding_model: z.string(),\n  embedding_model_description: z.string(),\n})) satisfies ZodType<EmbeddingModelOption, any, any>;\n\nexport async function listEmbeddingModelOptions () {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models/providers/options`), {\n    headers: await 
authenticationHeaders(),\n  })\n    .then(handleResponse(embeddingModelOptionSchema.array()));\n}\n\nexport async function getEmbeddingModel (id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models/${id}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(embeddingModelSchema));\n}\n\nexport async function listEmbeddingModels (params: PageParams) {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models`, params), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(embeddingModelSchema)));\n}\n\nexport async function createEmbeddingModel (create: CreateEmbeddingModel) {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models`), {\n    method: 'POST',\n    body: JSON.stringify(create),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(embeddingModelSchema));\n}\n\nexport async function updateEmbeddingModel (id: number, update: UpdateEmbeddingModel) {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models/${id}`), {\n    method: 'PUT',\n    body: JSON.stringify(update),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(embeddingModelSchema));\n}\n\nexport async function testEmbeddingModel (createEmbeddingModel: CreateEmbeddingModel) {\n  return await fetch(requestUrl(`/api/v1/admin/embedding-models/test`), {\n    method: 'POST',\n    body: JSON.stringify(createEmbeddingModel),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(z.object({\n      success: z.boolean(),\n      error: z.string().optional(),\n    })));\n}\n\n"
  },
  {
    "path": "frontend/app/src/api/evaluations.ts",
    "content": "import { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJson, zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport interface EvaluationDataset {\n  id: number;\n  name: string;\n  user_id: string;\n  created_at: Date;\n  updated_at: Date;\n}\n\nexport interface EvaluationDatasetItem {\n  created_at: Date;\n  updated_at: Date;\n  id: number;\n  query: string;\n  reference: string;\n  retrieved_contexts: string[];\n  extra: any;\n  evaluation_dataset_id: number;\n}\n\nexport interface EvaluationTask {\n  id: number;\n  name: string;\n  user_id: string;\n  created_at: Date;\n  updated_at: Date;\n  dataset_id: number;\n}\n\nexport interface EvaluationTaskWithSummary extends EvaluationTask {\n  summary: EvaluationTaskSummary;\n}\n\nexport const EvaluationTaskSummaryMetrics = [\n  'avg_factual_correctness',\n  'avg_semantic_similarity',\n  'min_factual_correctness',\n  'min_semantic_similarity',\n  'max_factual_correctness',\n  'max_semantic_similarity',\n  'std_factual_correctness',\n  'std_semantic_similarity',\n] as const;\n\nexport type EvaluationTaskSummaryMetric = typeof EvaluationTaskSummaryMetrics[number];\n\nexport interface EvaluationTaskSummary extends Record<EvaluationTaskSummaryMetric, number | null> {\n  not_start: number;\n  succeed: number;\n  errored: number;\n  progressing: number;\n}\n\nexport type EvaluationTaskItemStatus = 'not_start' | 'evaluating' | 'done' | 'error' | 'cancel'\n\nexport interface EvaluationTaskItem {\n  created_at: Date;\n  updated_at: Date;\n  id: number;\n  chat_engine: string;\n  status: EvaluationTaskItemStatus;\n  query: string;\n  reference: string;\n  response: string | null;\n  retrieved_contexts: string[] | null;\n  extra: any | null;\n  error_msg: string | null;\n  factual_correctness: number | null;\n  semantic_similarity: number | null;\n  evaluation_task_id: number;\n}\n\nexport 
interface CreateEvaluationDatasetParams {\n  name: string;\n  upload_id?: number;\n}\n\nexport interface UpdateEvaluationDatasetParams {\n  name: string;\n}\n\nexport interface CreateEvaluationDatasetItemParams {\n  query: string;\n  reference: string;\n  retrieved_contexts: string[];\n  extra: any;\n}\n\nexport interface UpdateEvaluationDatasetItemParams {\n  query: string;\n  reference: string;\n  retrieved_contexts: string[];\n  extra: any;\n}\n\nexport interface CreateEvaluationTaskParams {\n  name: string;\n  evaluation_dataset_id: number;\n  chat_engine?: string;\n  run_size?: number;\n}\n\nconst evaluationDatasetSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  user_id: z.string(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n}) satisfies ZodType<EvaluationDataset, any, any>;\n\nconst evaluationDatasetItemSchema = z.object({\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  id: z.number(),\n  query: z.string(),\n  reference: z.string(),\n  retrieved_contexts: z.string().array(),\n  extra: zodJson(),\n  evaluation_dataset_id: z.number(),\n}) satisfies ZodType<EvaluationDatasetItem, any, any>;\n\nconst evaluationTaskSummarySchema = z.object({\n  not_start: z.number(),\n  succeed: z.number(),\n  errored: z.number(),\n  progressing: z.number(),\n  avg_factual_correctness: z.number().nullable(),\n  avg_semantic_similarity: z.number().nullable(),\n  min_factual_correctness: z.number().nullable(),\n  min_semantic_similarity: z.number().nullable(),\n  max_factual_correctness: z.number().nullable(),\n  max_semantic_similarity: z.number().nullable(),\n  std_factual_correctness: z.number().nullable(),\n  std_semantic_similarity: z.number().nullable(),\n}) satisfies ZodType<EvaluationTaskSummary, any, any>;\n\nconst evaluationTaskSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  user_id: z.string(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  dataset_id: z.number(),\n}) satisfies 
ZodType<EvaluationTask, any, any>;\n\nconst evaluationTaskWithSummarySchema = evaluationTaskSchema.extend({\n  summary: evaluationTaskSummarySchema,\n});\n\nconst evaluationTaskItemSchema = z.object({\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  id: z.number(),\n  chat_engine: z.string(),\n  status: z.enum(['not_start', 'evaluating', 'done', 'error', 'cancel']),\n  query: z.string(),\n  reference: z.string(),\n  response: z.string().nullable(),\n  retrieved_contexts: z.string().array().nullable(),\n  extra: zodJson().nullable(),\n  error_msg: z.string().nullable(),\n  factual_correctness: z.number().nullable(),\n  semantic_similarity: z.number().nullable(),\n  evaluation_task_id: z.number(),\n}) satisfies ZodType<EvaluationTaskItem, any, any>;\n\n// Datasets\n\nexport async function listEvaluationDatasets ({ ...params }: PageParams & { keyword?: string }): Promise<Page<EvaluationDataset>> {\n  return fetch(requestUrl('/api/v1/admin/evaluation/datasets', params), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(evaluationDatasetSchema)));\n}\n\nexport async function createEvaluationDataset (params: CreateEvaluationDatasetParams): Promise<EvaluationDataset> {\n  return fetch(requestUrl('/api/v1/admin/evaluation/datasets'), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  })\n    .then(handleResponse(evaluationDatasetSchema));\n}\n\nexport async function updateEvaluationDataset (id: number, params: UpdateEvaluationDatasetParams): Promise<EvaluationDataset> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/datasets/${id}`), {\n    method: 'PUT',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  })\n    .then(handleResponse(evaluationDatasetSchema));\n}\n\nexport async function 
deleteEvaluationDataset (id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/evaluation/datasets/${id}`), {\n    method: 'DELETE',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  });\n}\n\n// Dataset Items\n\nexport async function listEvaluationDatasetItems (datasetId: number, { ...params }: PageParams & { keyword?: string }): Promise<Page<EvaluationDatasetItem>> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/datasets/${datasetId}/dataset-items`, params), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(evaluationDatasetItemSchema)));\n}\n\nexport async function createEvaluationDatasetItem (datasetId: number, params: CreateEvaluationDatasetItemParams): Promise<EvaluationDatasetItem> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/dataset-items`), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify({\n      evaluation_dataset_id: datasetId,\n      ...params,\n    }),\n  })\n    .then(handleResponse(evaluationDatasetItemSchema));\n}\n\nexport async function updateEvaluationDatasetItem (datasetId: number, id: number, params: UpdateEvaluationDatasetItemParams): Promise<EvaluationDatasetItem> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/dataset-items/${id}`), {\n    method: 'PUT',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify({\n      evaluation_dataset_id: datasetId,\n      ...params,\n    }),\n  })\n    .then(handleResponse(evaluationDatasetItemSchema));\n}\n\nexport async function getEvaluationDatasetItem (datasetId: number, id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/evaluation/dataset-items/${id}`), {\n    method: 'GET',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(evaluationDatasetItemSchema));\n}\n\nexport 
async function deleteEvaluationDatasetItem (datasetId: number, id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/evaluation/dataset-items/${id}`), {\n    method: 'DELETE',\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleErrors);\n}\n\n// Tasks\n\nexport async function createEvaluationTask (params: CreateEvaluationTaskParams): Promise<EvaluationTask> {\n  return fetch(requestUrl('/api/v1/admin/evaluation/tasks'), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  })\n    .then(handleResponse(evaluationTaskSchema));\n}\n\nexport async function listEvaluationTasks ({ ...params }: PageParams & { keyword?: string }): Promise<Page<EvaluationTaskWithSummary>> {\n  return fetch(requestUrl('/api/v1/admin/evaluation/tasks', params), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(evaluationTaskWithSummarySchema)));\n}\n\nexport async function getEvaluationTaskWithSummary (id: number): Promise<EvaluationTaskWithSummary> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/tasks/${id}/summary`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(evaluationTaskWithSummarySchema));\n}\n\nexport async function cancelEvaluationTask (id: number): Promise<void> {\n  await fetch(requestUrl(`/api/v1/admin/evaluation/tasks/${id}`), {\n    method: 'DELETE',\n    headers: await authenticationHeaders(),\n  })\n    .then(handleErrors);\n}\n\nexport async function listEvaluationTaskItems (id: number, params: PageParams & { keyword?: string }): Promise<Page<EvaluationTaskItem>> {\n  return fetch(requestUrl(`/api/v1/admin/evaluation/tasks/${id}/items`, params), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(evaluationTaskItemSchema)));\n}\n"
  },
  {
    "path": "frontend/app/src/api/feedbacks.ts",
    "content": "import { authenticationHeaders, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport const enum FeedbackType {\n  like = 'like',\n  dislike = 'dislike',\n}\n\nexport interface Feedback {\n  id: number;\n  feedback_type: FeedbackType;\n  comment: string;\n  chat_id: string;\n  chat_message_id: number;\n  created_at: Date;\n  updated_at: Date;\n  user_id: string | null;\n  chat_title: string;\n  origin: string | null;\n  chat_origin: string | null;\n  chat_message_content: string;\n  user_email: string | null;\n}\n\nconst feedbackSchema = z.object({\n  id: z.number(),\n  feedback_type: z.enum([FeedbackType.like, FeedbackType.dislike]),\n  comment: z.string(),\n  chat_id: z.string(),\n  chat_message_id: z.number(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  user_id: z.string().nullable(),\n  user_email: z.string().nullable(),\n  origin: z.string().nullable(),\n  chat_title: z.string(),\n  chat_origin: z.string().nullable(),\n  chat_message_content: z.string(),\n}) satisfies ZodType<Feedback, any, any>;\n\nexport async function listFeedbacks ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<Feedback>> {\n  return await fetch(requestUrl('/api/v1/admin/feedbacks', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(feedbackSchema)));\n}\n"
  },
  {
    "path": "frontend/app/src/api/graph.ts",
    "content": "import { authenticationHeaders, handleResponse, requestUrl } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { bufferedReadableStreamTransformer } from '@/lib/buffered-readable-stream';\nimport { z, type ZodType } from 'zod';\n\nexport interface KnowledgeGraph {\n  entities: KnowledgeGraphEntity[];\n  relationships: KnowledgeGraphRelationship[];\n}\n\nexport const enum KnowledgeGraphEntityType {\n  original = 'original',\n  synopsis = 'synopsis',\n}\n\nexport interface KnowledgeGraphEntity {\n  id: number;\n  knowledge_base_id?: number | null;\n  name: string;\n  description: string;\n  meta: object;\n  entity_type: KnowledgeGraphEntityType;\n  synopsis_info?: {\n    entities: number[]\n    topic: string\n  } | null;\n}\n\nexport interface KnowledgeGraphRelationship {\n  id: number;\n  source_entity_id: number;\n  target_entity_id: number;\n  knowledge_base_id?: number | null;\n  description: string;\n  meta: object;\n  weight: number;\n}\n\nexport const entitySchema = z.object({\n  id: z.number(),\n  knowledge_base_id: z.number().nullable().optional(),\n  name: z.string(),\n  description: z.string(),\n  meta: z.object({}).passthrough(),\n  entity_type: z.enum([KnowledgeGraphEntityType.original, KnowledgeGraphEntityType.synopsis]),\n  synopsis_info: z.object({\n    entities: z.number().array(),\n    topic: z.string(),\n  }).nullish(),\n}) satisfies ZodType<KnowledgeGraphEntity>;\n\nexport const relationshipSchema = z.object({\n  id: z.number(),\n  knowledge_base_id: z.number().nullable().optional(),\n  source_entity_id: z.number(),\n  target_entity_id: z.number(),\n  description: z.string(),\n  last_modified_at: zodJsonDate().nullish(),\n  meta: z.object({}).passthrough(),\n  weight: z.number(),\n}) satisfies ZodType<KnowledgeGraphRelationship>;\n\nexport const knowledgeGraphSchema = z.object({\n  entities: entitySchema.array(),\n  relationships: relationshipSchema.array(),\n}) satisfies ZodType<KnowledgeGraph>;\n\nexport 
interface UpdateEntityParams {\n  name: string | null;\n  description: string | null;\n  meta: object | null;\n}\n\nexport interface CreateSynopsisEntityParams {\n  name: string;\n  description: string;\n  meta: object;\n  topic: string;\n  entities: number[];\n}\n\nexport interface UpdateRelationshipParams {\n  description: string | null;\n  meta: object | null;\n  weight: number | null;\n}\n\nexport interface GraphSearchParams {\n  query: string;\n  include_meta?: boolean;\n  depth?: number;\n  with_degree?: boolean;\n}\n\nexport interface KBRetrieveKnowledgeGraphParams {\n  query: string;\n  llm_id: number;\n  retrieval_config: {\n    knowledge_graph: KnowledgeGraphRetrievalConfig;\n  }\n}\n\nexport interface KnowledgeGraphRetrievalConfig {\n  depth?: number;\n  include_meta?: boolean;\n  with_degree?: boolean;\n  metadata_filter?: {\n    enabled?: boolean;\n    filters?: Record<string, any>\n  }\n}\n\nexport async function search (kbId: number, params: GraphSearchParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/search`), {\n    method: 'post',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(knowledgeGraphSchema));\n}\n\nexport async function searchEntity (kbId: number, query: string, top_k: number = 10) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entities/search`, { query, top_k }), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(entitySchema.array()));\n}\n\nexport async function getEntity (kbId: number, id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entities/${id}`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(entitySchema));\n}\n\nexport async function updateEntity (kbId: number, id: number, params: 
UpdateEntityParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entities/${id}`), {\n    method: 'put',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(entitySchema));\n}\n\nexport async function createSynopsisEntity (kbId: number, params: CreateSynopsisEntityParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entities/synopsis`), {\n    method: 'post',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(entitySchema));\n}\n\nexport async function getEntitySubgraph (kbId: number, id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entities/${id}/subgraph`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(knowledgeGraphSchema));\n}\n\nexport async function getEntireKnowledgeGraph (kbId: number, params: KBRetrieveKnowledgeGraphParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entire_graph`), {\n    method: 'post',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  })\n  .then(handleResponse(knowledgeGraphSchema));\n}\n\nexport async function streamEntireKnowledgeGraph (kbId: number): Promise<KnowledgeGraph> {\n  const entities: KnowledgeGraphEntity[] = [];\n  const relationships: KnowledgeGraphRelationship[] = [];\n  \n  const response = await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/entire_graph/stream`), {\n    method: 'GET',\n    headers: await authenticationHeaders(),\n    credentials: 'include',\n  });\n  \n  if (!response.ok) {\n    throw new Error(`${response.status} ${response.statusText}`);\n  }\n  \n  if 
(!response.body) {\n    throw new Error('Empty response body');\n  }\n  \n  const reader = response.body.pipeThrough(bufferedReadableStreamTransformer()).getReader();\n  \n  try {\n    while (true) {\n      const { done, value } = await reader.read();\n      if (done) break;\n      \n      if (value.trim() && value.startsWith('data: ')) {\n        const dataStr = value.substring(6).trim();\n        if (dataStr) {\n          try {\n            const data = JSON.parse(dataStr);\n            \n            if (data.type === 'entities') {\n              entities.push(...data.data);\n              // console.log(`Received ${data.data.length} entities, total: ${entities.length}`);\n            } else if (data.type === 'relationships') {\n              relationships.push(...data.data);\n              // console.log(`Received ${data.data.length} relationships, total: ${relationships.length}`);\n            } else if (data.type === 'complete') {\n              // console.log(`Streaming complete. Final counts - entities: ${entities.length}, relationships: ${relationships.length}`);\n              return { entities, relationships };\n            }\n          } catch (error) {\n            console.warn('Failed to parse streaming data:', error, 'Data:', dataStr);\n          }\n        }\n      }\n    }\n  } finally {\n    reader.releaseLock();\n  }\n  \n  return { entities, relationships };\n}\n\nexport async function getRelationship (kbId: number, id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/relationships/${id}`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(relationshipSchema));\n}\n\nexport async function updateRelationship (kbId: number, id: number, params: UpdateRelationshipParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kbId}/graph/relationships/${id}`), {\n    method: 'put',\n    headers: {\n      ...await authenticationHeaders(),\n      
'Content-Type': 'application/json',\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(relationshipSchema));\n}\n"
  },
  {
    "path": "frontend/app/src/api/knowledge-base.ts",
    "content": "import { type BaseCreateDatasourceParams, type CreateDatasourceSpecParams, type Datasource, type DatasourceKgIndexError, datasourceSchema, type DatasourceVectorIndexError } from '@/api/datasources';\nimport { documentSchema } from '@/api/documents';\nimport { type EmbeddingModelSummary, embeddingModelSummarySchema } from '@/api/embedding-models';\nimport { type LLMSummary, llmSummarySchema } from '@/api/llms';\nimport { type IndexProgress, indexSchema, indexStatusSchema, type IndexTotalStats, totalSchema } from '@/api/rag';\nimport { authenticationHeaders, handleErrors, handleResponse, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType } from 'zod';\n\nexport type KnowledgeBaseIndexMethod = 'vector' | 'knowledge_graph';\n\nexport interface CreateKnowledgeBaseParams {\n  name: string;\n  description?: string | null;\n  index_methods: KnowledgeBaseIndexMethod[];\n  llm_id?: number | null;\n  embedding_model_id?: number | null;\n  data_sources: (BaseCreateDatasourceParams & CreateDatasourceSpecParams)[];\n}\n\nexport interface UpdateKnowledgeBaseParams {\n  name?: string;\n  description?: string | null;\n}\n\nexport interface KnowledgeBaseSummary {\n  id: number;\n  name: string;\n  description: string | null;\n  index_methods: KnowledgeBaseIndexMethod[];\n  documents_total?: number;\n  data_sources_total?: number;\n  created_at: Date;\n  updated_at: Date;\n  creator: {\n    id: string;\n  };\n}\n\nexport interface KnowledgeBase extends KnowledgeBaseSummary {\n  data_sources: Datasource[];\n  llm?: LLMSummary | null;\n  embedding_model?: EmbeddingModelSummary | null;\n  chunking_config: KnowledgeBaseChunkingConfig | null;\n}\n\nexport type KnowledgeGraphIndexProgress = {\n  vector_index: IndexProgress\n  documents: IndexTotalStats\n  chunks: IndexTotalStats\n  kg_index?: IndexProgress\n  entities?: IndexTotalStats\n  relationships?: IndexTotalStats\n}\n\nexport type 
KnowledgeBaseSplitterType = KnowledgeBaseChunkingSplitterRule['splitter'];\n\nexport type KnowledgeBaseChunkingSentenceSplitterConfig = {\n  chunk_size: number\n  chunk_overlap: number\n  paragraph_separator: string\n}\n\nexport type KnowledgeBaseChunkingMarkdownSplitterConfig = {\n  chunk_size: number\n  chunk_header_level: number\n}\n\nexport type KnowledgeBaseChunkingSentenceSplitterRule = {\n  splitter: 'SentenceSplitter'\n  splitter_config: KnowledgeBaseChunkingSentenceSplitterConfig\n}\n\nexport type KnowledgeBaseChunkingMarkdownSplitterRule = {\n  splitter: 'MarkdownSplitter'\n  splitter_config: KnowledgeBaseChunkingMarkdownSplitterConfig\n}\n\nexport type KnowledgeBaseChunkingSplitterRule = KnowledgeBaseChunkingSentenceSplitterRule | KnowledgeBaseChunkingMarkdownSplitterRule;\n\nexport type KnowledgeBaseChunkingConfigGeneral = {\n  mode: 'general'\n} & KnowledgeBaseChunkingSentenceSplitterConfig;\n\nexport type KnowledgeBaseChunkingConfigAdvanced = {\n  mode: 'advanced'\n  rules: {\n    'text/plain': KnowledgeBaseChunkingSplitterRule;\n    'text/markdown': KnowledgeBaseChunkingSplitterRule\n  }\n}\n\nexport type KnowledgeBaseChunkingConfig = KnowledgeBaseChunkingConfigGeneral | KnowledgeBaseChunkingConfigAdvanced;\n\nexport type KnowledgeGraphDocumentChunk = z.infer<typeof knowledgeGraphDocumentChunkSchema>;\n\nconst knowledgeBaseChunkingSentenceSplitterConfigSchema = z.object({\n  chunk_size: z.number().int().min(1),\n  chunk_overlap: z.number().int().min(0),\n  paragraph_separator: z.string(),\n}) satisfies z.ZodType<KnowledgeBaseChunkingSentenceSplitterConfig, any, any>;\n\nconst knowledgeBaseChunkingMarkdownSplitterConfigSchema = z.object({\n  chunk_size: z.number().int().min(1),\n  chunk_header_level: z.number().int().min(1).max(6),\n}) satisfies z.ZodType<KnowledgeBaseChunkingMarkdownSplitterConfig, any, any>;\n\nconst knowledgeBaseChunkingSplitterRuleSchema = z.discriminatedUnion('splitter', [\n  z.object({\n    splitter: 
z.literal('MarkdownSplitter'),\n    splitter_config: knowledgeBaseChunkingMarkdownSplitterConfigSchema,\n  }),\n  z.object({\n    splitter: z.literal('SentenceSplitter'),\n    splitter_config: knowledgeBaseChunkingSentenceSplitterConfigSchema,\n  }),\n]) satisfies z.ZodType<KnowledgeBaseChunkingSplitterRule, any, any>;\n\nexport const knowledgeBaseChunkingConfigSchema = z.discriminatedUnion('mode', [\n  z.object({\n    mode: z.literal('general'),\n    chunk_size: z.number().int().min(1),\n    chunk_overlap: z.number().int().min(0),\n    paragraph_separator: z.string(),\n  }),\n  z.object({\n    mode: z.literal('advanced'),\n    rules: z.object({\n      'text/plain': knowledgeBaseChunkingSplitterRuleSchema,\n      'text/markdown': knowledgeBaseChunkingSplitterRuleSchema,\n    }),\n  }),\n]) satisfies z.ZodType<KnowledgeBaseChunkingConfig, any, any>;\n\nconst knowledgeBaseSummarySchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  description: z.string().nullable(),\n  index_methods: z.enum(['vector', 'knowledge_graph']).array(),\n  documents_total: z.number().optional(),\n  data_sources_total: z.number().optional(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n  creator: z.object({\n    id: z.string(),\n  }),\n}) satisfies ZodType<KnowledgeBaseSummary, any, any>;\n\nconst knowledgeBaseSchema = knowledgeBaseSummarySchema.extend({\n  data_sources: datasourceSchema.array(),\n  llm: llmSummarySchema.nullable().optional(),\n  embedding_model: embeddingModelSummarySchema.nullable().optional(),\n  chunking_config: knowledgeBaseChunkingConfigSchema.nullable(),\n}) satisfies ZodType<KnowledgeBase, any, any>;\n\nconst knowledgeGraphIndexProgressSchema = z.object({\n  vector_index: indexSchema,\n  documents: totalSchema,\n  chunks: totalSchema,\n  kg_index: indexSchema.optional(),\n  entities: totalSchema.optional(),\n  relationships: totalSchema.optional(),\n}) satisfies ZodType<KnowledgeGraphIndexProgress>;\n\nconst knowledgeGraphDocumentChunkSchema 
= z.object({\n  id: z.string(),\n  document_id: z.number(),\n  hash: z.string(),\n  text: z.string(),\n  meta: z.object({}).passthrough(),\n  embedding: z.number().array(),\n  relations: z.any(),\n  source_uri: z.string(),\n  index_status: indexStatusSchema,\n  index_result: z.string().nullable(),\n  created_at: zodJsonDate(),\n  updated_at: zodJsonDate(),\n});\n\nconst vectorIndexErrorSchema = z.object({\n  document_id: z.number(),\n  document_name: z.string(),\n  source_uri: z.string(),\n  error: z.string().nullable(),\n}) satisfies ZodType<DatasourceVectorIndexError, any, any>;\n\nconst kgIndexErrorSchema = z.object({\n  document_id: z.number(),\n  document_name: z.string(),\n  chunk_id: z.string(),\n  source_uri: z.string(),\n  error: z.string().nullable(),\n}) satisfies ZodType<DatasourceKgIndexError, any, any>;\n\nconst knowledgeBaseLinkedChatEngine = z.object({\n  id: z.number(),\n  name: z.string(),\n  is_default: z.boolean(),\n});\n\nexport async function listKnowledgeBases ({ page = 1, size = 10 }: PageParams) {\n  return await fetch(requestUrl('/api/v1/admin/knowledge_bases', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(knowledgeBaseSummarySchema)));\n}\n\nexport async function getKnowledgeBaseById (id: number): Promise<KnowledgeBase> {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(knowledgeBaseSchema));\n}\n\nexport async function getKnowledgeBaseDocumentChunks (id: number, documentId: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/documents/${documentId}/chunks`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(knowledgeGraphDocumentChunkSchema.array()));\n}\n\nexport async function getKnowledgeBaseDocument (id: number, documentId: number) {\n  return await 
fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/documents/${documentId}`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(documentSchema.omit({ knowledge_base: true, data_source: true })));\n}\n\nexport async function getKnowledgeBaseLinkedChatEngines (id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/linked_chat_engines`), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(knowledgeBaseLinkedChatEngine.array()));\n}\n\nexport async function deleteKnowledgeBaseDocument (id: number, documentId: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/documents/${documentId}`), {\n    method: 'DELETE',\n    headers: await authenticationHeaders(),\n  })\n    .then(handleErrors);\n}\n\nexport async function rebuildKBDocumentIndex (kb_id: number, doc_id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${kb_id}/documents/${doc_id}/reindex`), {\n    method: 'POST',\n    headers: await authenticationHeaders(),\n  })\n    .then(handleErrors);\n}\n\nexport async function createKnowledgeBase (params: CreateKnowledgeBaseParams) {\n  return await fetch(requestUrl('/api/v1/admin/knowledge_bases'), {\n    method: 'POST',\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(knowledgeBaseSchema));\n}\n\nexport async function updateKnowledgeBase (id: number, params: UpdateKnowledgeBaseParams) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}`), {\n    method: 'PUT',\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n    body: JSON.stringify(params),\n  }).then(handleResponse(knowledgeBaseSchema));\n}\n\nexport async function getKnowledgeGraphIndexProgress (id: number): Promise<KnowledgeGraphIndexProgress> {\n  return 
fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/overview`), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(knowledgeGraphIndexProgressSchema));\n}\n\nexport async function listKnowledgeBaseVectorIndexErrors (id: number, { page = 1, size = 10 }: PageParams = {}) {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/vector-index-errors`, { page, size }), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(zodPage(vectorIndexErrorSchema)));\n}\n\nexport async function listKnowledgeBaseKgIndexErrors (id: number, { page = 1, size = 10 }: PageParams = {}) {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/kg-index-errors`, { page, size }), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(zodPage(kgIndexErrorSchema)));\n}\n\nexport async function retryKnowledgeBaseAllFailedTasks (id: number) {\n  return fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}/retry-failed-index-tasks`), {\n    method: 'POST',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n  }).then(handleErrors);\n}\n\nexport async function deleteKnowledgeBase (id: number) {\n  return await fetch(requestUrl(`/api/v1/admin/knowledge_bases/${id}`), {\n    method: 'DELETE',\n    headers: await authenticationHeaders(),\n  })\n    .then(handleErrors);\n}\n"
  },
  {
    "path": "frontend/app/src/api/llms.ts",
    "content": "import { type ProviderOption, providerOptionSchema } from '@/api/providers';\nimport { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType, type ZodTypeDef } from 'zod';\n\nexport interface LLMSummary {\n  id: number;\n  name: string;\n  provider: string;\n  model: string;\n  is_default: boolean;\n}\n\nexport interface LLM extends LLMSummary {\n  config?: any;\n  created_at: Date | null;\n  updated_at: Date | null;\n}\n\nexport interface LlmOption extends ProviderOption {\n  default_llm_model: string;\n  llm_model_description: string;\n}\n\nexport interface CreateLLM {\n  name: string;\n  provider: string;\n  model: string;\n  config?: any;\n  is_default?: boolean;\n  credentials: string | object;\n}\n\nexport interface UpdateLLM {\n  name?: string;\n  config?: any;\n  credentials?: string | object;\n}\n\nexport const llmSummarySchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  provider: z.string(),\n  model: z.string(),\n  is_default: z.boolean(),\n}) satisfies ZodType<LLMSummary, ZodTypeDef, any>;\n\nconst llmSchema = llmSummarySchema.extend({\n  config: z.any(),\n  created_at: zodJsonDate().nullable(),\n  updated_at: zodJsonDate().nullable(),\n}) satisfies ZodType<LLM, ZodTypeDef, any>;\n\nconst llmOptionSchema = providerOptionSchema.and(z.object({\n  default_llm_model: z.string(),\n  llm_model_description: z.string(),\n})) satisfies ZodType<LlmOption, any, any>;\n\nexport async function listLlmOptions () {\n  return await fetch(requestUrl(`/api/v1/admin/llms/providers/options`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(llmOptionSchema.array()));\n}\n\nexport async function listLlms ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<LLM>> {\n  return await fetch(requestUrl('/api/v1/admin/llms', { page, size }), {\n    headers: await 
authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(llmSchema)));\n}\n\nexport async function getLlm (id: number): Promise<LLM> {\n  return await fetch(requestUrl(`/api/v1/admin/llms/${id}`), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(llmSchema));\n}\n\nexport async function createLlm (create: CreateLLM) {\n  return await fetch(requestUrl(`/api/v1/admin/llms`), {\n    method: 'POST',\n    body: JSON.stringify(create),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(llmSchema));\n}\n\nexport async function updateLlm (id: number, update: UpdateLLM) {\n  return await fetch(requestUrl(`/api/v1/admin/llms/${id}`), {\n    method: 'PUT',\n    body: JSON.stringify(update),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(llmSchema));\n}\n\nexport async function deleteLlm (id: number) {\n  await fetch(requestUrl(`/api/v1/admin/llms/${id}`), {\n    method: 'DELETE',\n    headers: await authenticationHeaders(),\n  }).then(handleErrors);\n}\n\nexport async function testLlm (createLLM: CreateLLM) {\n  return await fetch(requestUrl(`/api/v1/admin/llms/test`), {\n    method: 'POST',\n    body: JSON.stringify(createLLM),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(z.object({\n      success: z.boolean(),\n      error: z.string().optional(),\n    })));\n}\n"
  },
  {
    "path": "frontend/app/src/api/providers.ts",
    "content": "import { z, type ZodType } from 'zod';\n\nexport interface ProviderOption {\n  provider: string;\n  provider_display_name: string | null;\n  provider_description: string | null;\n  provider_url: string | null;\n  credentials_display_name: string;\n  credentials_description: string;\n  credentials_type: 'str' | 'dict';\n  default_credentials: any;\n  default_config: object;\n  config_description: string;\n}\n\nexport const providerOptionSchema = z.object({\n  provider: z.string(),\n  provider_display_name: z.string().nullable(),\n  provider_description: z.string().nullable(),\n  provider_url: z.string().nullable(),\n  credentials_display_name: z.string(),\n  credentials_description: z.string(),\n  default_config: z.object({}).passthrough(),\n  config_description: z.string(),\n}).and(z.discriminatedUnion('credentials_type', [\n  z.object({\n    credentials_type: z.literal('str'),\n    default_credentials: z.string(),\n  }),\n  z.object({\n    credentials_type: z.literal('dict'),\n    default_credentials: z.object({}).passthrough(),\n  }),\n])) satisfies ZodType<ProviderOption, any, any>;\n"
  },
  {
    "path": "frontend/app/src/api/rag.ts",
    "content": "import { z, type ZodType } from 'zod';\n\nexport const indexStatuses = [\n  'not_started',\n  'pending',\n  'running',\n  'completed',\n  'failed',\n] as const;\n\nexport type IndexStatus = typeof indexStatuses[number];\n\nexport type IndexProgress = Partial<Record<IndexStatus, number>>\n\nexport type IndexTotalStats = {\n  total: number\n}\n\nexport const indexStatusSchema = z.enum(indexStatuses) satisfies ZodType<IndexStatus>;\n\nexport const totalSchema = z.object({\n  total: z.number(),\n}) satisfies ZodType<IndexTotalStats>;\n\nexport const indexSchema = z.object({\n  not_started: z.number().optional(),\n  pending: z.number().optional(),\n  running: z.number().optional(),\n  completed: z.number().optional(),\n  failed: z.number().optional(),\n}) satisfies ZodType<IndexProgress>;\n"
  },
  {
    "path": "frontend/app/src/api/rerankers.ts",
    "content": "import { type ProviderOption, providerOptionSchema } from '@/api/providers';\nimport { authenticationHeaders, handleErrors, handleResponse, type Page, type PageParams, requestUrl, zodPage } from '@/lib/request';\nimport { zodJsonDate } from '@/lib/zod';\nimport { z, type ZodType, type ZodTypeDef } from 'zod';\n\nexport interface Reranker {\n  id: number;\n  name: string;\n  provider: string;\n  model: string;\n  top_n: number;\n  config?: any;\n  is_default: boolean;\n  created_at: Date | null;\n  updated_at: Date | null;\n}\n\nexport interface RerankerOption extends ProviderOption {\n  default_reranker_model: string;\n  reranker_model_description: string;\n  default_top_n: number;\n}\n\nexport interface CreateReranker {\n  name: string;\n  provider: string;\n  model: string;\n  config?: any;\n  top_n: number;\n  is_default?: boolean;\n  credentials: string | object;\n}\n\nexport interface UpdateReranker {\n  name?: string;\n  config?: any;\n  top_n?: number;\n  credentials?: string | object;\n}\n\nconst rerankerSchema = z.object({\n  id: z.number(),\n  name: z.string(),\n  provider: z.string(),\n  model: z.string(),\n  top_n: z.number(),\n  config: z.any(),\n  is_default: z.boolean(),\n  created_at: zodJsonDate().nullable(),\n  updated_at: zodJsonDate().nullable(),\n}) satisfies ZodType<Reranker, ZodTypeDef, any>;\n\nconst rerankerOptionSchema = providerOptionSchema.and(z.object({\n  default_top_n: z.number(),\n  default_reranker_model: z.string(),\n  reranker_model_description: z.string(),\n})) satisfies ZodType<RerankerOption, any, any>;\n\nexport async function listRerankerOptions () {\n  return await fetch(requestUrl(`/api/v1/admin/reranker-models/providers/options`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(rerankerOptionSchema.array()));\n}\n\nexport async function listRerankers ({ page = 1, size = 10 }: PageParams = {}): Promise<Page<Reranker>> {\n  return await 
fetch(requestUrl('/api/v1/admin/reranker-models', { page, size }), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(zodPage(rerankerSchema)));\n}\n\nexport async function getReranker (id: number): Promise<Reranker> {\n  return await fetch(requestUrl(`/api/v1/admin/reranker-models/${id}`), {\n    headers: await authenticationHeaders(),\n  }).then(handleResponse(rerankerSchema));\n}\n\nexport async function createReranker (create: CreateReranker) {\n  return await fetch(requestUrl(`/api/v1/admin/reranker-models`), {\n    method: 'POST',\n    body: JSON.stringify(create),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(rerankerSchema));\n}\n\nexport async function updateReranker (id: number, update: UpdateReranker) {\n  return await fetch(requestUrl(`/api/v1/admin/reranker-models/${id}`), {\n    method: 'PUT',\n    body: JSON.stringify(update),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  }).then(handleResponse(rerankerSchema));\n}\n\nexport async function deleteReranker (id: number) {\n  await fetch(requestUrl(`/api/v1/admin/reranker-models/${id}`), {\n    method: 'DELETE',\n    headers: await authenticationHeaders(),\n  }).then(handleErrors);\n}\n\nexport async function testReranker (createReranker: CreateReranker) {\n  return await fetch(requestUrl(`/api/v1/admin/reranker-models/test`), {\n    method: 'POST',\n    body: JSON.stringify(createReranker),\n    headers: {\n      'Content-Type': 'application/json',\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(z.object({\n      success: z.boolean(),\n      error: z.string().optional(),\n    })));\n}\n"
  },
  {
    "path": "frontend/app/src/api/site-settings.ts",
    "content": "import { authenticationHeaders, handleErrors, handleResponse, requestUrl } from '@/lib/request';\nimport { z } from 'zod';\n\ninterface SettingItemBase<K, T> {\n  name: string;\n  description: string;\n  group: string;\n  data_type: K;\n  value: T | null;\n  client: boolean | null;\n  default: T;\n}\n\nexport type IntSettingItem = SettingItemBase<'int', number>\nexport type FloatSettingItem = SettingItemBase<'float', number>\nexport type BoolSettingItem = SettingItemBase<'bool', boolean>\nexport type StringSettingItem = SettingItemBase<'str', string>\nexport type ListSettingItem = SettingItemBase<'list', any[]>\nexport type DictSettingItem = SettingItemBase<'dict', object>\n\nexport type SettingItem =\n  IntSettingItem\n  | FloatSettingItem\n  | BoolSettingItem\n  | StringSettingItem\n  | ListSettingItem\n  | DictSettingItem;\n\nexport interface PublicWebsiteSettings {\n  'title': string;\n  'description': string;\n  'homepage_title': string;\n  'homepage_example_questions': string[];\n  'homepage_footer_links': { text: string, href: string }[];\n  'logo_in_dark_mode': string;\n  'logo_in_light_mode': string;\n  'social_github': string | null;\n  'social_twitter': string | null;\n  'social_discord': string | null;\n  'custom_js_example_questions': string[];\n  'custom_js_button_label': string;\n  'custom_js_button_img_src': string;\n  'custom_js_logo_src': string;\n  'ga_id': string | null;\n  'max_upload_file_size': number | null;\n  'enable_post_verifications': boolean;\n  'enable_post_verifications_for_widgets': boolean;\n}\n\nconst settingsItemSchema = z.object({\n  name: z.string(),\n  description: z.string(),\n  client: z.boolean().nullable(),\n  group: z.string(),\n  // data_type: z.enum(['list', 'dict', 'str', 'int', 'float', 'bool']),\n}).and(z.discriminatedUnion('data_type', [\n  z.object({\n    data_type: z.literal('str'),\n    value: z.string().nullable(),\n    default: z.string(),\n  }),\n  z.object({\n    data_type: z.literal('int'),\n 
   value: z.number().int().nullable(),\n    default: z.number().int(),\n  }),\n  z.object({\n    data_type: z.literal('float'),\n    value: z.number().nullable(),\n    default: z.number(),\n  }),\n  z.object({\n    data_type: z.literal('bool'),\n    value: z.boolean().nullable(),\n    default: z.coerce.boolean(),\n  }),\n  z.object({\n    data_type: z.literal('list'),\n    value: z.any().array().nullable(),\n    default: z.any().array(),\n  }),\n  z.object({\n    data_type: z.literal('dict'),\n    value: z.object({}).passthrough().nullable(),\n    default: z.object({}).passthrough(),\n  }),\n]));\n\nexport type AllSettings = Record<string, SettingItem>\n\nexport async function getAllSiteSettings (): Promise<AllSettings> {\n  return await fetch(requestUrl(`/api/v1/admin/site-settings`),\n    {\n      headers: await authenticationHeaders(),\n    })\n    .then(handleResponse(z.record(settingsItemSchema)));\n}\n\nexport async function updateSiteSetting (name: string, value: any) {\n  await fetch(requestUrl(`/api/v1/admin/site-settings/${name}`), {\n    method: 'PUT',\n    headers: {\n      ...await authenticationHeaders(),\n      'Content-Type': 'application/json',\n    },\n    body: JSON.stringify({ value }),\n  }).then(handleErrors);\n}\n\nexport async function getPublicSiteSettings (): Promise<PublicWebsiteSettings> {\n  return fetch(requestUrl(`/api/v1/site-config`), {\n    headers: await authenticationHeaders(),\n    credentials: 'include',\n  }).then(handleErrors).then(res => res.json());\n}\n"
  },
  {
    "path": "frontend/app/src/api/stats.ts",
    "content": "import { authenticationHeaders, handleResponse, requestUrl } from '@/lib/request';\nimport { zodDateOnlyString } from '@/lib/zod';\nimport { format } from 'date-fns';\nimport { z, type ZodType } from 'zod';\n\nexport interface TrendBaseItem {\n  date: Date;\n}\n\nexport interface ChatUserTrendItem extends TrendBaseItem {\n  user: number;\n  anonymous: number;\n}\n\nexport type ChatOriginTrendItem = TrendBaseItem & Omit<{\n  [key: string]: number\n}, 'date'>\n\nexport interface TrendResponse<T> {\n  start_date: Date;\n  end_date: Date;\n  values: T[];\n}\n\nconst trendBaseSchema = z.object({\n  date: zodDateOnlyString(),\n});\n\nconst chatUserTrendItemSchema = trendBaseSchema.extend({\n  user: z.number(),\n  anonymous: z.number(),\n}) satisfies ZodType<ChatUserTrendItem, any, any>;\n\nconst chatOriginTrendItemSchema = trendBaseSchema.and(\n  z.preprocess((input) => {\n    if (input && typeof input === 'object') {\n      const { date: _, ...ret } = input as any;\n      return ret;\n    } else {\n      return input;\n    }\n  }, z.record(z.string(), z.number())),\n) satisfies ZodType<ChatOriginTrendItem, any, any>;\n\nfunction trendResponse<T> (item: ZodType<T, any, any>): ZodType<TrendResponse<T>, any, any> {\n  return z.object({\n    start_date: zodDateOnlyString(),\n    end_date: zodDateOnlyString(),\n    values: item.array(),\n  });\n}\n\nfunction trendParams (start: Date, end: Date) {\n  return {\n    start_date: format(start, 'yyyy-MM-dd'),\n    end_date: format(end, 'yyyy-MM-dd'),\n  };\n}\n\nexport async function getChatUserTrend (startDate: Date, endDate: Date) {\n  return await fetch(requestUrl('/api/v1/admin/stats/trend/chat-user', trendParams(startDate, endDate)), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(trendResponse(chatUserTrendItemSchema)));\n}\n\nexport async function getChatOriginTrend (startDate: Date, endDate: Date) {\n  return await 
fetch(requestUrl('/api/v1/admin/stats/trend/chat-origin', trendParams(startDate, endDate)), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n  })\n    .then(handleResponse(trendResponse(chatOriginTrendItemSchema)));\n}\n"
  },
  {
    "path": "frontend/app/src/api/system.ts",
    "content": "import { authenticationHeaders, handleResponse, requestUrl } from '@/lib/request';\n\nimport { z } from 'zod';\n\nexport interface RequiredBootstrapStatus {\n  default_llm: boolean;\n  default_embedding_model: boolean;\n  default_chat_engine: boolean;\n  knowledge_base: boolean;\n}\n\nexport interface OptionalBootstrapStatus {\n  langfuse: boolean;\n  default_reranker: boolean;\n}\n\nexport interface NeedMigrationStatus {\n  chat_engines_without_kb_configured?: number[];\n}\n\nexport interface BootstrapStatus {\n  required: RequiredBootstrapStatus;\n  optional: OptionalBootstrapStatus;\n  need_migration: NeedMigrationStatus;\n}\n\nconst requiredBootstrapStatusSchema = z.object({\n  default_llm: z.boolean(),\n  default_embedding_model: z.boolean(),\n  default_chat_engine: z.boolean(),\n  knowledge_base: z.boolean(),\n});\n\nconst optionalBootstrapStatusSchema = z.object({\n  langfuse: z.boolean(),\n  default_reranker: z.boolean(),\n});\n\nconst needMigrationStatusSchema = z.object({\n  chat_engines_without_kb_configured: z.number().array().optional(),\n});\n\nconst bootstrapStatusSchema = z.object({\n  required: requiredBootstrapStatusSchema,\n  optional: optionalBootstrapStatusSchema,\n  need_migration: needMigrationStatusSchema,\n});\n\nexport async function getBootstrapStatus (): Promise<BootstrapStatus> {\n  return await fetch(requestUrl(`/api/v1/system/bootstrap-status`), {\n    headers: {\n      ...await authenticationHeaders(),\n    },\n    credentials: 'include',\n  }).then(handleResponse(bootstrapStatusSchema));\n}\n\nexport function isBootstrapStatusPassed (bootstrapStatus: BootstrapStatus): boolean {\n  return Object.values(bootstrapStatus.required).reduce((res, flag) => res && flag, true);\n}\n"
  },
  {
    "path": "frontend/app/src/api/users.ts",
    "content": "import { authenticationHeaders, handleResponse, requestUrl } from '@/lib/request';\nimport { z } from 'zod';\n\nexport interface MeInfo {\n  id: string;\n  email: string;\n  is_active: boolean;\n  is_superuser: boolean;\n  is_verified: boolean;\n}\n\nconst userSchema = z.object({\n  id: z.string(),\n  email: z.string(),\n  is_active: z.boolean(),\n  is_superuser: z.boolean(),\n  is_verified: z.boolean(),\n});\n\nexport async function getMe (): Promise<MeInfo> {\n  return await fetch(requestUrl('/api/v1/users/me'), {\n    headers: await authenticationHeaders(),\n  })\n    .then(handleResponse(userSchema));\n}\n"
  },
  {
    "path": "frontend/app/src/app/(experimental)/experimental-features/route.ts",
    "content": "import { experimentalFeatures } from '@/experimental/experimental-features';\nimport { NextResponse } from 'next/server';\n\nexport function GET () {\n  return NextResponse.json(experimentalFeatures(), {\n    headers: {\n      'Access-Control-Allow-Origin': '*',\n      'Access-Control-Allow-Methods': 'GET',\n    },\n  });\n}\n\nexport const dynamic = 'force-dynamic';\n\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(.)auth/login/loading.tsx",
    "content": "'use client';\n\nimport { Loader } from '@/components/loader';\n\nexport default function Loading () {\n  return (\n    <Loader loading>\n      Loading\n    </Loader>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(.)auth/login/page.client.tsx",
    "content": "'use client';\n\nimport { Signin } from '@/components/signin';\nimport { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle } from '@/components/ui/dialog';\nimport { useRouter } from 'next/navigation';\n\nexport function SigninDialog ({ callbackUrl }: { callbackUrl?: string }) {\n  const router = useRouter();\n\n  return (\n    <Dialog\n      open\n      onOpenChange={(open) => {\n        if (!open) {\n          router.back();\n        }\n      }}\n    >\n      <DialogContent>\n        <DialogHeader>\n          <DialogTitle className=\"text-2xl font-normal\">\n            Sign In\n          </DialogTitle>\n          <DialogDescription>\n            Sign in to continue to your account.\n          </DialogDescription>\n        </DialogHeader>\n        <Signin callbackUrl={callbackUrl} />\n      </DialogContent>\n    </Dialog>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(.)auth/login/page.tsx",
    "content": "import { headers } from 'next/headers';\nimport { SigninDialog } from './page.client';\n\nexport default async function Page () {\n  const referer = (await headers()).get('Referer') ?? undefined;\n\n  return (\n    <SigninDialog callbackUrl={referer} />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/chat-engines/[id]/page.tsx",
    "content": "import { getChatEngine, getDefaultChatEngineOptions } from '@/api/chat-engines';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { UpdateChatEngineForm } from '@/components/chat-engine/update-chat-engine-form';\nimport { getBootstrapStatus } from '@/api/system';\n\nexport default async function ChatEnginePage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const [chatEngine, defaultChatEngineOptions, bootstrapStatus] = await Promise.all([\n    getChatEngine(parseInt(params.id)),\n    getDefaultChatEngineOptions(),\n    getBootstrapStatus(),\n  ]);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Chat Engines', docsUrl: 'https://autoflow.tidb.ai/chat-engine', url: '/chat-engines' },\n          {\n            title: chatEngine.name,\n            alert: bootstrapStatus.need_migration.chat_engines_without_kb_configured?.includes(chatEngine.id) ? {\n              variant: 'warning',\n              content: 'KnowledgeBase not configured',\n            } : undefined,\n          },\n        ]}\n      />\n      <UpdateChatEngineForm chatEngine={chatEngine} defaultChatEngineOptions={defaultChatEngineOptions} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/chat-engines/new/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateChatEngineForm } from '@/components/chat-engine/create-chat-engine-form';\nimport { getDefaultChatEngineOptions } from '@/api/chat-engines';\n\nexport default async function NewChatEnginePage () {\n  const defaultOptions = await getDefaultChatEngineOptions();\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Chat Engines', docsUrl: 'https://autoflow.tidb.ai/chat-engine', url: '/chat-engines' },\n          { title: 'New' },\n        ]}\n      />\n      <CreateChatEngineForm defaultChatEngineOptions={defaultOptions} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/chat-engines/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { ChatEnginesTable } from '@/components/chat-engine/chat-engines-table';\nimport { NextLink } from '@/components/nextjs/NextLink';\n\nexport default function ChatEnginesPage () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Chat Engines', docsUrl: 'https://autoflow.tidb.ai/chat-engine' },\n        ]}\n      />\n      <NextLink href=\"/chat-engines/new\">New Chat Engine</NextLink>\n      <ChatEnginesTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/embedding-models/[id]/page.tsx",
    "content": "import { getEmbeddingModel } from '@/api/embedding-models';\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { UpdateEmbeddingModelForm } from '@/components/embedding-models/UpdateEmbeddingModelForm';\n\nexport default async function Page (props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const embeddingModel = await getEmbeddingModel(parseInt(params.id));\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Embedding Models', url: '/embedding-models', docsUrl: 'https://autoflow.tidb.ai/embedding-model' },\n          { title: embeddingModel.name },\n        ]}\n      />\n      <UpdateEmbeddingModelForm embeddingModel={embeddingModel} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/embedding-models/create/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateEmbeddingModelForm } from '@/components/embedding-models/CreateEmbeddingModelForm';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\n\nexport default function Page () {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Embedding Models', url: '/embedding-models', docsUrl: 'https://autoflow.tidb.ai/embedding-model' },\n          { title: 'Create' },\n        ]}\n      />\n      <CreateEmbeddingModelForm\n        transitioning={transitioning}\n        onCreated={embeddingModel => {\n          startTransition(() => {\n            router.push(`/embedding-models/${embeddingModel.id}`);\n            router.refresh();\n          });\n        }}\n      />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/embedding-models/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { EmbeddingModelsTable } from '@/components/embedding-models/EmbeddingModelsTable';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { PlusIcon } from 'lucide-react';\n\nexport default function EmbeddingModelPage () {\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Embedding Models', docsUrl: 'https://autoflow.tidb.ai/embedding-model' },\n        ]}\n      />\n      <NextLink href=\"/embedding-models/create\">\n        <PlusIcon className=\"size-4\" />\n        New Embedding Model\n      </NextLink>\n      <EmbeddingModelsTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/items/[itemId]/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { Loader2Icon } from 'lucide-react';\nimport { UpdateEvaluationDatasetItemForm } from '@/components/evaluations/update-evaluation-dataset-item-form';\nimport { use } from 'react';\nimport { useEvaluationDataset } from '@/components/evaluations/hooks';\n\nexport default function Page (props: { params: Promise<{ id: string, itemId: string }> }) {\n  const params = use(props.params);\n  const evaluationDatasetId = parseInt(decodeURIComponent(params.id));\n  const evaluationDatasetItemId = parseInt(decodeURIComponent(params.itemId));\n\n  const { evaluationDataset } = useEvaluationDataset(evaluationDatasetId);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/datasets' },\n          { title: evaluationDataset?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />, url: `/evaluation/datasets/${evaluationDatasetId}` },\n          { title: `${evaluationDatasetItemId}` },\n        ]}\n      />\n      <UpdateEvaluationDatasetItemForm\n        evaluationDatasetId={evaluationDatasetId}\n        evaluationDatasetItemId={evaluationDatasetItemId}\n      />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/items/new/page.tsx",
    "content": "'use client';\n\nimport { mutateEvaluationDataset, useEvaluationDataset } from '@/components/evaluations/hooks';\nimport { use, useTransition } from 'react';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateEvaluationDatasetItemForm } from '@/components/evaluations/create-evaluation-dataset-item-form';\nimport { Loader2Icon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\n\nexport default function CreateEvaluationDatasetItemPage (props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const evaluationDatasetId = parseInt(decodeURIComponent(params.id));\n\n  const { evaluationDataset } = useEvaluationDataset(evaluationDatasetId);\n\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/datasets' },\n          { title: evaluationDataset?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />, url: `/evaluation/datasets/${evaluationDatasetId}` },\n          { title: 'New Item' },\n        ]}\n      />\n      <CreateEvaluationDatasetItemForm\n        evaluationDatasetId={evaluationDatasetId}\n        transitioning={transitioning}\n        onCreated={() => {\n          startTransition(() => {\n            router.push(`/evaluation/datasets/${evaluationDatasetId}`);\n            router.refresh();\n            void mutateEvaluationDataset(evaluationDatasetId);\n          });\n        }}\n      />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/not-found.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { ResourceNotFound } from '@/components/resource-not-found';\n\nexport default function NotFound () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/datasets' },\n          { title: <span className=\"text-destructive\">Not Found</span> },\n        ]}\n      />\n      <ResourceNotFound resource=\"Evaluation Dataset\" buttonHref=\"/evaluation/datasets\" />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { EvaluationDatasetInfo } from '@/components/evaluations/evaluation-dataset-info';\nimport { EvaluationDatasetItemsTable } from '@/components/evaluations/evaluation-dataset-items-table';\nimport { Loader2Icon } from 'lucide-react';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { Separator } from '@/components/ui/separator';\nimport { use } from 'react';\nimport { useEvaluationDataset } from '@/components/evaluations/hooks';\n\nexport default function EvaluationDatasetPage (props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const evaluationDatasetId = parseInt(decodeURIComponent(params.id));\n\n  const { evaluationDataset, isLoading } = useEvaluationDataset(evaluationDatasetId);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/datasets' },\n          { title: evaluationDataset?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" /> },\n        ]}\n      />\n      <EvaluationDatasetInfo evaluationDatasetId={evaluationDatasetId} />\n      <Separator className=\"space-y-6\" />\n      <NextLink href={`/evaluation/datasets/${evaluationDatasetId}/items/new`} disabled={isLoading}>\n        New Item\n      </NextLink>\n      <EvaluationDatasetItemsTable evaluationDatasetId={evaluationDatasetId} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/create/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateEvaluationDatasetForm } from '@/components/evaluations/create-evaluation-dataset-form';\nimport { mutateEvaluationDatasets } from '@/components/evaluations/hooks';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\n\nexport default function EvaluationTaskPage () {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/datasets' },\n          { title: 'Create' },\n        ]}\n      />\n      <CreateEvaluationDatasetForm\n        transitioning={transitioning}\n        onCreated={evaluationDataset => {\n          void mutateEvaluationDatasets();\n          startTransition(() => {\n            router.push(`/evaluation/datasets/${evaluationDataset.id}`);\n            router.refresh();\n          });\n        }}\n      />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/datasets/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { EvaluationDatasetsTable } from '@/components/evaluations/evaluation-datasets-table';\nimport { NextLink } from '@/components/nextjs/NextLink';\n\nexport default function EvaluationDatasetsPage () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets' },\n        ]}\n      />\n      <NextLink href=\"/evaluation/datasets/create\">New Evaluation Dataset</NextLink>\n      <EvaluationDatasetsTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/page.tsx",
    "content": "import { redirect } from 'next/navigation';\n\nexport default function EvaluationsPage () {\n  redirect('/evaluation/tasks');\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/tasks/[id]/not-found.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { ResourceNotFound } from '@/components/resource-not-found';\n\nexport default function NotFound () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Datasets', url: '/evaluation/tasks' },\n          { title: <span className=\"text-destructive\">Not Found</span> },\n        ]}\n      />\n      <ResourceNotFound resource=\"Evaluation Task\" buttonHref=\"/evaluation/tasks\" />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/tasks/[id]/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { EvaluationTaskInfo } from '@/components/evaluations/evaluation-task-info';\nimport { EvaluationTaskItemsTable } from '@/components/evaluations/evaluation-task-items-table';\nimport { Loader2Icon } from 'lucide-react';\nimport { use } from 'react';\nimport { useEvaluationTask } from '@/components/evaluations/hooks';\n\nexport default function EvaluationTaskPage (props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const evaluationTaskId = parseInt(decodeURIComponent(params.id));\n\n  const { evaluationTask } = useEvaluationTask(evaluationTaskId);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Tasks', url: '/evaluation/tasks' },\n          { title: evaluationTask?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" /> },\n        ]}\n      />\n      <EvaluationTaskInfo evaluationTaskId={evaluationTaskId} />\n      <EvaluationTaskItemsTable evaluationTaskId={evaluationTaskId} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/tasks/create/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateEvaluationTaskForm } from '@/components/evaluations/create-evaluation-task-form';\nimport { mutateEvaluationTasks } from '@/components/evaluations/hooks';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\n\nexport default function EvaluationTaskPage () {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Tasks', url: '/evaluation/tasks' },\n          { title: 'Create' },\n        ]}\n      />\n      <CreateEvaluationTaskForm\n        transitioning={transitioning}\n        onCreated={evaluationTask => {\n          void mutateEvaluationTasks();\n          startTransition(() => {\n            router.push(`/evaluation/tasks/${evaluationTask.id}`);\n            router.refresh();\n          });\n        }}\n      />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/evaluation/tasks/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { EvaluationTasksTable } from '@/components/evaluations/evaluation-tasks-table';\nimport { NextLink } from '@/components/nextjs/NextLink';\n\nexport default function EvaluationTasksPage () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Evaluation', docsUrl: 'https://autoflow.tidb.ai/evaluation' },\n          { title: 'Tasks' },\n        ]}\n      />\n      <NextLink href=\"/evaluation/tasks/create\">New Evaluation Task</NextLink>\n      <EvaluationTasksTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/feedbacks/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { FeedbacksTable } from '@/components/feedbacks/feedbacks-table';\n\nexport default function ChatEnginesPage () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Feedbacks' },\n        ]}\n      />\n      <FeedbacksTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(special)/data-sources/new/page.tsx",
    "content": "'use client';\n;\n\nimport { mutateKnowledgeBases, useKnowledgeBase } from '@/components/knowledge-base/hooks';\nimport { use, useTransition } from 'react';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateDatasourceForm } from '@/components/datasource/create-datasource-form';\nimport { Loader2Icon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\n\nexport default function NewKnowledgeBaseDataSourcePage (props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const id = parseInt(decodeURIComponent(params.id));\n  const { knowledgeBase } = useKnowledgeBase(id);\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Knowledge Bases', url: '/knowledge-bases', docsUrl: 'https://autoflow.tidb.ai/knowledge-base' },\n          { title: knowledgeBase?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />, url: `/knowledge-bases/${id}` },\n          { title: 'DataSources', url: `/knowledge-bases/${id}/data-sources` },\n          { title: 'New' },\n        ]}\n      />\n      <CreateDatasourceForm\n        knowledgeBaseId={id}\n        transitioning={transitioning}\n        onCreated={() => {\n          startTransition(() => {\n            router.back();\n          });\n          mutateKnowledgeBases();\n        }}\n      />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(special)/documents/[documentId]/chunks/page.tsx",
    "content": "'use client';;\n\nimport { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';\nimport { getKnowledgeBaseDocument, getKnowledgeBaseDocumentChunks } from '@/api/knowledge-base';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { DateFormat } from '@/components/date-format';\nimport { Loader2Icon } from 'lucide-react';\nimport { use } from \"react\";\nimport { useKnowledgeBase } from '@/components/knowledge-base/hooks';\nimport useSWR from 'swr';\n\nexport default function DocumentChunksPage(props: { params: Promise<{ id: string, documentId: string }> }) {\n  const params = use(props.params);\n  const kbId = parseInt(decodeURIComponent(params.id));\n  const documentId = parseInt(decodeURIComponent(params.documentId));\n  const { knowledgeBase } = useKnowledgeBase(kbId);\n\n  const { data: document } = useSWR(`api.knowledge-bases.${kbId}.documents.${documentId}`, () => getKnowledgeBaseDocument(kbId, documentId));\n\n  const { data = [], isLoading } = useSWR(`api.knowledge-bases.${kbId}.documents.${documentId}.chunks`, () => getKnowledgeBaseDocumentChunks(kbId, documentId), {\n    revalidateOnFocus: false,\n  });\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Knowledge Bases', url: '/knowledge-bases', docsUrl: 'https://autoflow.tidb.ai/knowledge-base' },\n          { title: knowledgeBase?.name ?? <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />, url: `/knowledge-bases/${kbId}` },\n          { title: document?.name ?? 
<Loader2Icon className=\"size-4 animate-spin repeat-infinite\" /> },\n          { title: 'Chunks' },\n        ]}\n      />\n      <div className=\"space-y-6 max-w-screen-sm\">\n        {data.map(chunk => (\n          <Card key={chunk.id}>\n            <CardHeader>\n              <CardTitle className=\"text-base\">\n                {chunk.id}\n              </CardTitle>\n              <CardDescription className=\"text-xs\">\n                Hash: <code>{chunk.hash}</code>\n                <br />\n                Source: {chunk.source_uri}\n                <br />\n                Updated At: <DateFormat date={chunk.updated_at} />\n              </CardDescription>\n            </CardHeader>\n            <CardContent className=\"space-y-4\">\n              <section className=\"space-y-2\">\n                <h4>Meta</h4>\n                <CodeInput disabled language=\"json\" value={JSON.stringify(chunk.meta, undefined, 2)} />\n              </section>\n              <section className=\"space-y-2\">\n                <h4>Content</h4>\n                <CodeInput className=\"h-96\" disabled language=\"markdown\" value={chunk.text} />\n              </section>\n              <section className=\"space-y-2\">\n                <h4>Embedding</h4>\n                <CodeInput className=\"h-96\" disabled language=\"json\" value={JSON.stringify(chunk.embedding, undefined, 2)} />\n              </section>\n            </CardContent>\n          </Card>\n        ))}\n      </div>\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/data-sources/page.tsx",
    "content": "'use client';;\nimport { use } from \"react\";\n\nimport { DatasourceCard } from '@/components/datasource/datasource-card';\nimport { DatasourceCreateOption } from '@/components/datasource/datasource-create-option';\nimport { NoDatasourcePlaceholder } from '@/components/datasource/no-datasource-placeholder';\nimport { useAllKnowledgeBaseDataSources } from '@/components/knowledge-base/hooks';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { FileDownIcon, GlobeIcon, PaperclipIcon } from 'lucide-react';\n\nexport default function KnowledgeBaseDataSourcesPage(props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const id = parseInt(decodeURIComponent(params.id));\n  const { data: dataSources, isLoading } = useAllKnowledgeBaseDataSources(id);\n\n  return (\n    <div className=\"space-y-8 max-w-screen-sm\">\n      <section className=\"space-y-4\">\n        <h3>Create Data Source</h3>\n        <div className=\"grid md:grid-cols-3 gap-4\">\n          <DatasourceCreateOption\n            knowledgeBaseId={id}\n            type=\"file\"\n            icon={<PaperclipIcon className=\"size-4 flex-shrink-0\" />}\n            title=\"Files\"\n          >\n            Upload files\n          </DatasourceCreateOption>\n          <DatasourceCreateOption\n            knowledgeBaseId={id}\n            type=\"web_single_page\"\n            icon={<FileDownIcon className=\"size-4 flex-shrink-0\" />}\n            title=\"Web Pages\"\n          >\n            Select pages.\n          </DatasourceCreateOption>\n          <DatasourceCreateOption\n            knowledgeBaseId={id}\n            type=\"web_sitemap\"\n            icon={<GlobeIcon className=\"size-4 flex-shrink-0\" />}\n            title=\"Website by sitemap\"\n          >\n            Select web sitemap.\n          </DatasourceCreateOption>\n        </div>\n      </section>\n      <section className=\"space-y-4\">\n        <h3>Browse existing Data Sources</h3>\n        
{isLoading && <Skeleton className=\"h-20 rounded-lg\" />}\n        {dataSources?.map(datasource => (\n          <DatasourceCard key={datasource.id} knowledgeBaseId={id} datasource={datasource} />\n        ))}\n        {dataSources?.length === 0 && (\n          <NoDatasourcePlaceholder />\n        )}\n      </section>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/index-progress/page.tsx",
    "content": "import { KnowledgeBaseIndexProgress } from '@/components/knowledge-base/knowledge-base-index';\n\nexport default async function KnowledgeBaseIndexProgressPage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const id = parseInt(decodeURIComponent(params.id));\n\n  return (\n    <section className=\"space-y-2\">\n      <h3 className=\"text-lg font-medium\">Index Progress</h3>\n      <KnowledgeBaseIndexProgress id={id} />\n    </section>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/knowledge-graph-explorer/create-synopsis-entity/page.tsx",
    "content": "'use client';;\nimport { use } from \"react\";\n\nimport type { KnowledgeGraphEntity } from '@/api/graph';\nimport { GraphCreateEntity } from '@/components/graph/GraphCreateEntity';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { useRouter } from 'next/navigation';\n\nexport default function CreateSynopsisEntityPage(props: { params: Promise<{ id: string }> }) {\n  const params = use(props.params);\n  const kbId = parseInt(decodeURIComponent(params.id));\n  const router = useRouter();\n\n  const handleCreateEntity = (entity: KnowledgeGraphEntity) => {\n    router.push(`/knowledge-bases/${kbId}/knowledge-graph-explorer?query=entity:${entity.id}`);\n  };\n\n  return (\n    <>\n      <NextLink href=''>\n        Back\n      </NextLink>\n      <GraphCreateEntity knowledgeBaseId={kbId} onCreated={handleCreateEntity} />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/knowledge-graph-explorer/page.tsx",
    "content": "import { GraphEditor } from '@/components/graph/GraphEditor';\n\nexport default async function KnowledgeGraphExplorerPage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const id = parseInt(decodeURIComponent(params.id));\n\n  return (\n    <section className=\"space-y-2\">\n      <GraphEditor knowledgeBaseId={id} />\n    </section>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/layout.tsx",
    "content": "'use client';;\n\nimport { SecondaryNavigatorLayout, SecondaryNavigatorList, SecondaryNavigatorMain } from '@/components/secondary-navigator-list';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { ArrowRightIcon } from '@/components/icons';\nimport { KnowledgeBaseTabs } from '@/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/tabs';\nimport Link from 'next/link';\nimport { Loader2Icon } from 'lucide-react';\nimport type { ReactNode } from 'react';\nimport { use } from \"react\";\nimport { useKnowledgeBase } from '@/components/knowledge-base/hooks';\n\nexport default function KnowledgeBaseLayout(props: { params: Promise<{ id: string }>, children: ReactNode }) {\n  const params = use(props.params);\n\n  const {\n    children\n  } = props;\n\n  const id = parseInt(decodeURIComponent(params.id));\n  const { knowledgeBase } = useKnowledgeBase(id);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Knowledge Bases', url: '/knowledge-bases', docsUrl: 'https://autoflow.tidb.ai/knowledge-base' },\n          {\n            alert: knowledgeBase?.data_sources_total === 0 ? {\n              variant: 'warning',\n              content: <>\n                <p>This Knowledge Base has no datasource.</p>\n                <Link className=\"underline flex gap-2 items-center\" href={`/knowledge-bases/${id}/data-sources/new`}>\n                  Create Data Source\n                  <ArrowRightIcon className=\"size-4\" />\n                </Link>\n              </>,\n            } : undefined,\n            title: knowledgeBase?.name ?? 
<Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />,\n          },\n        ]}\n      />\n      <SecondaryNavigatorLayout>\n        <SecondaryNavigatorList>\n          <KnowledgeBaseTabs knowledgeBaseId={id} />\n        </SecondaryNavigatorList>\n        <SecondaryNavigatorMain className=\"space-y-4 px-2\">\n          {children}\n        </SecondaryNavigatorMain>\n      </SecondaryNavigatorLayout>\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/page.tsx",
    "content": "import { DocumentsTable } from '@/components/documents/documents-table';\n\nexport default async function KnowledgeBasePage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const id = parseInt(decodeURIComponent(params.id));\n\n  return (\n    <>\n      <DocumentsTable knowledgeBaseId={id} />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/settings/page.tsx",
    "content": "import { cachedGetKnowledgeBaseById } from '@/app/(main)/(admin)/knowledge-bases/[id]/api';\nimport { KnowledgeBaseSettingsForm } from '@/components/knowledge-base/knowledge-base-settings-form';\n\nexport default async function KnowledgeBaseSettingsPage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const id = parseInt(decodeURIComponent(params.id));\n  const kb = await cachedGetKnowledgeBaseById(id);\n\n  return (\n    <KnowledgeBaseSettingsForm knowledgeBase={kb} />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/tabs.tsx",
    "content": "'use client';\n\nimport { useKnowledgeBase } from '@/components/knowledge-base/hooks';\nimport { SecondaryNavigatorLink } from '@/components/secondary-navigator-list';\n\nexport function KnowledgeBaseTabs ({ knowledgeBaseId }: { knowledgeBaseId: number }) {\n  const { knowledgeBase } = useKnowledgeBase(knowledgeBaseId);\n\n  return (\n    <>\n      <SecondaryNavigatorLink pathname={`/knowledge-bases/${knowledgeBaseId}`}>\n        Documents\n        <span className=\"ml-auto text-xs font-normal text-muted-foreground\">\n          {knowledgeBase?.documents_total}\n        </span>\n      </SecondaryNavigatorLink>\n      <SecondaryNavigatorLink pathname={`/knowledge-bases/${knowledgeBaseId}/data-sources`}>\n        Data Sources\n        <span className=\"ml-auto text-xs font-normal text-muted-foreground\">\n          {knowledgeBase?.data_sources_total}\n        </span>\n      </SecondaryNavigatorLink>\n      <SecondaryNavigatorLink pathname={`/knowledge-bases/${knowledgeBaseId}/index-progress`}>\n        Index Progress\n      </SecondaryNavigatorLink>\n      {/*<TabsTrigger*/}\n      {/*  disabled={true}*/}\n      {/*  value=\"retrieval-tester\"*/}\n      {/*  onClick={() => startTransition(() => {*/}\n      {/*    router.push(`/knowledge-bases/${knowledgeBase.id}/retrieval-tester`);*/}\n      {/*  })}*/}\n      {/*>*/}\n      {/*  Retrieval Tester*/}\n      {/*</TabsTrigger>*/}\n      <SecondaryNavigatorLink pathname={`/knowledge-bases/${knowledgeBaseId}/knowledge-graph-explorer`}>\n        Graph Explorer\n      </SecondaryNavigatorLink>\n      <SecondaryNavigatorLink pathname={`/knowledge-bases/${knowledgeBaseId}/settings`}>\n        Settings\n      </SecondaryNavigatorLink>\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/api.ts",
    "content": "import { getKnowledgeBaseById } from '@/api/knowledge-base';\nimport { cache } from 'react';\n\nexport const cachedGetKnowledgeBaseById = cache(getKnowledgeBaseById);"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/context.tsx",
    "content": "'use client';\n\nimport type { KnowledgeBase } from '@/api/knowledge-base';\nimport { createContext, type ReactNode, useContext } from 'react';\n\nconst KBContext = createContext<KnowledgeBase>(null as any);\n\nexport function KBProvider ({ children, value }: { children: ReactNode, value: KnowledgeBase }) {\n  return (\n    <KBContext.Provider value={value}>\n      {children}\n    </KBContext.Provider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/new/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateKnowledgeBaseForm } from '@/components/knowledge-base/create-knowledge-base-form';\n\nexport default function NewKnowledgeBasePage () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Knowledge Bases', url: '/knowledge-bases' },\n          { title: 'New' },\n        ]}\n      />\n      <CreateKnowledgeBaseForm />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/knowledge-bases/page.tsx",
    "content": "'use client';\n\nimport { KnowledgeBaseCard, KnowledgeBaseCardPlaceholder } from '@/components/knowledge-base/knowledge-base-card';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport KnowledgeBaseEmptyState from '@/components/knowledge-base/empty-state';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { useAllKnowledgeBases } from '@/components/knowledge-base/hooks';\n\nexport default function KnowledgeBasesPage () {\n  const { data: knowledgeBases, isLoading } = useAllKnowledgeBases();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Knowledge Bases', docsUrl: 'https://autoflow.tidb.ai/knowledge-base' },\n        ]}\n      />\n      <NextLink href=\"/knowledge-bases/new\">\n        New Knowledge Base\n      </NextLink>\n      {\n        isLoading\n          ? <div className=\"grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-4\"><KnowledgeBaseCardPlaceholder /></div>\n          : !!knowledgeBases?.length\n            ? <div className=\"grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-4\">\n              {knowledgeBases.map(kb => (\n                <KnowledgeBaseCard key={kb.id} knowledgeBase={kb} />\n              ))}\n            </div>\n            : <KnowledgeBaseEmptyState />\n      }\n    </>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/layout.tsx",
    "content": "import { AdminPageLayout } from '@/components/admin-page-layout';\nimport { requireAuth } from '@/lib/auth';\nimport { type ReactNode } from 'react';\n\nexport default async function Layout ({ children }: { children: ReactNode }) {\n  await requireAuth();\n  return (\n    <AdminPageLayout>\n      {children}\n    </AdminPageLayout>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/llms/[id]/page.tsx",
    "content": "import { getLlm } from '@/api/llms';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { UpdateLlmForm } from '@/components/llm/UpdateLLMForm';\n\nexport default async function Page (props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const llm = await getLlm(parseInt(params.id));\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'LLMs', url: '/llms', docsUrl: 'https://autoflow.tidb.ai/llm' },\n          { title: llm.name },\n        ]}\n      />\n      <UpdateLlmForm llm={llm} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/llms/create/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateLLMForm } from '@/components/llm/CreateLLMForm';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\n\nexport default function Page () {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'LLMs', url: '/llms', docsUrl: 'https://autoflow.tidb.ai/llm' },\n          { title: 'Create' },\n        ]}\n      />\n      <CreateLLMForm\n        transitioning={transitioning}\n        onCreated={llm => {\n          startTransition(() => {\n            router.push(`/llms/${llm.id}`);\n            router.refresh();\n          });\n        }}\n      />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/llms/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { LLMsTable } from '@/components/llm/LLMsTable';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { PlusIcon } from 'lucide-react';\n\nexport default function Page () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'LLMs', docsUrl: 'https://autoflow.tidb.ai/llm' },\n        ]}\n      />\n      <NextLink href=\"/llms/create\">\n        <PlusIcon className=\"size-4\" />\n        New LLM\n      </NextLink>\n      <LLMsTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/reranker-models/[id]/page.tsx",
    "content": "import { getReranker } from '@/api/rerankers';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { UpdateRerankerForm } from '@/components/reranker/UpdateRerankerForm';\n\nexport default async function Page (props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const reranker = await getReranker(parseInt(params.id));\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Reranker Models', url: '/reranker-models', docsUrl: 'https://autoflow.tidb.ai/reranker-model' },\n          { title: reranker.name },\n        ]}\n      />\n      <UpdateRerankerForm reranker={reranker} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/reranker-models/create/page.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateRerankerForm } from '@/components/reranker/CreateRerankerForm';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\n\nexport default function Page () {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Reranker Models', url: '/reranker-models', docsUrl: 'https://autoflow.tidb.ai/reranker-model' },\n          { title: 'Create' },\n        ]}\n      />\n      <CreateRerankerForm\n        transitioning={transitioning}\n        onCreated={reranker => {\n          startTransition(() => {\n            router.push(`/reranker-models/${reranker.id}`);\n            router.refresh();\n          });\n\n        }}\n      />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/reranker-models/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { PlusIcon } from 'lucide-react';\nimport RerankerModelsTable from '@/components/reranker/RerankerModelsTable';\n\nexport default function Page () {\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Models' },\n          { title: 'Reranker Models', docsUrl: 'https://autoflow.tidb.ai/reranker-model' },\n        ]}\n      />\n      <NextLink href=\"/reranker-models/create\">\n        <PlusIcon className=\"size-4\" />\n        New Reranker Model\n      </NextLink>\n      <RerankerModelsTable />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/site-settings/custom_js/page.tsx",
    "content": "import { getAllSiteSettings } from '@/api/site-settings';\nimport { CustomJsSettings } from '@/components/settings/CustomJsSettings';\nimport { WidgetSnippet } from '@/components/settings/WidgetSnippet';\n\nexport default async function CustomJsSettingsPage () {\n  const settings = await getAllSiteSettings();\n\n  return (\n    <>\n      <section className=\"max-w-screen-md space-y-2 mb-8\">\n        <WidgetSnippet />\n        <p className=\"text-muted-foreground text-xs\">Copy this HTML fragment to your page.</p>\n      </section>\n      <CustomJsSettings schema={settings} />\n    </>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/site-settings/integrations/page.tsx",
    "content": "import { getAllSiteSettings } from '@/api/site-settings';\nimport { IntegrationsSettings } from '@/components/settings/IntegrationsSettings';\n\nexport default async function LangfuseSettingsPage () {\n  const settings = await getAllSiteSettings();\n\n  return (\n    <>\n      <IntegrationsSettings schema={settings} showPostVerificationSettings={!!process.env.EXPERIMENTAL_MESSAGE_VERIFY_SERVICE} />\n    </>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/site-settings/layout.tsx",
    "content": "'use client';\n\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { SecondaryNavigatorLayout, SecondaryNavigatorLink, SecondaryNavigatorList, SecondaryNavigatorMain } from '@/components/secondary-navigator-list';\nimport { type ReactNode } from 'react';\n\nexport default function SiteSettingsLayout ({ children }: { children: ReactNode }) {\n  return (\n    <div className=\"relative\">\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Site Settings' },\n        ]}\n      />\n      <SecondaryNavigatorLayout>\n        <SecondaryNavigatorList>\n          <SecondaryNavigatorLink pathname=\"/site-settings\">\n            Website\n          </SecondaryNavigatorLink>\n          <SecondaryNavigatorLink pathname=\"/site-settings/integrations\">\n            Integrations\n          </SecondaryNavigatorLink>\n          <SecondaryNavigatorLink pathname=\"/site-settings/custom_js\">\n            JS Widget\n          </SecondaryNavigatorLink>\n        </SecondaryNavigatorList>\n        <SecondaryNavigatorMain className=\"px-2\">\n          {children}\n        </SecondaryNavigatorMain>\n      </SecondaryNavigatorLayout>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/site-settings/page.tsx",
    "content": "import { getAllSiteSettings } from '@/api/site-settings';\nimport { WebsiteSettings } from '@/components/settings/WebsiteSettings';\n\nexport default async function SiteSettingsPage () {\n  const settings = await getAllSiteSettings();\n\n  return (\n    <>\n      <WebsiteSettings schema={settings} />\n    </>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(admin)/stats/trending/page.tsx",
    "content": "'use client';\n\nimport { getChatOriginTrend, getChatUserTrend } from '@/api/stats';\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { TrendsChart } from '@/components/charts/TrendsChart';\nimport { startOfToday, subDays } from 'date-fns';\nimport { useMemo } from 'react';\nimport useSWR from 'swr';\n\nconst today = startOfToday();\n\nexport default function Page () {\n  const start = subDays(today, 30);\n  const end = today;\n\n  const { data: chatUserTrend } = useSWR(`api.stats.trends.chat-user.${start}-${end}`, () => getChatUserTrend(start, end));\n  const { data: chatOriginTrend } = useSWR(`api.stats.trends.chat-origin.${start}-${end}`, () => getChatOriginTrend(start, end));\n\n  const originKeys = useMemo(() => {\n    if (!chatOriginTrend) {\n      return [];\n    }\n    const set = new Set<string>();\n    chatOriginTrend.values.forEach(item => {\n      Object.keys(item).forEach(key => set.add(key));\n    });\n    set.delete('date');\n    return Array.from(set);\n  }, [chatOriginTrend]);\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Stats' },\n          { title: 'Trending' },\n        ]}\n      />\n      <div className=\"grid grid-cols-2 gap-4\">\n        <div className=\"col-span-2\">\n          {chatUserTrend && <TrendsChart\n            title=\"Chats Users\"\n            description=\"Chats amounts of user and anonymous.\"\n            data={chatUserTrend}\n            dimensions={['user', 'anonymous']}\n            config={{\n              user: { label: 'User', color: 'hsl(var(--chart-1))' },\n              anonymous: { label: 'Anonymous', color: 'hsl(var(--chart-2))' },\n            }}\n          />}\n        </div>\n        <div className=\"col-span-2\">\n          {chatOriginTrend && <TrendsChart\n            title=\"Source\"\n            description=\"Chats amounts from different source\"\n            data={chatOriginTrend}\n            dimensions={originKeys}\n 
           config={Object.fromEntries(originKeys.map((key, i) => ([key, {\n              label: key,\n              color: `hsl(var(--chart-${(i % 5) + 1}))`,\n            }])))}\n          />}\n        </div>\n      </div>\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(user)/api-keys/page.tsx",
    "content": "'use client';\n\nimport { type ApiKey, type CreateApiKeyResponse, deleteApiKey, listApiKeys } from '@/api/api-keys';\nimport { AdminPageHeading } from '@/components/admin-page-heading';\nimport { CreateApiKeyForm } from '@/components/api-keys/CreateApiKeyForm';\nimport { datetime } from '@/components/cells/datetime';\nimport { CopyButton } from '@/components/copy-button';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { ManagedDialog } from '@/components/managed-dialog';\nimport { ManagedDialogClose } from '@/components/managed-dialog-close';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Button } from '@/components/ui/button';\nimport { DialogContent, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { DataTableConsumer, useDataTable } from '@/components/use-data-table';\nimport type { CellContext, ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { CircleCheckIcon, PlusIcon, TrashIcon } from 'lucide-react';\nimport { useState } from 'react';\n\nconst helper = createColumnHelper<ApiKey>();\n\nconst mono = (cell: CellContext<any, any>) => <span className=\"font-mono\">{cell.getValue()}</span>;\n\nconst columns = [\n  helper.accessor('api_key_display', { header: 'API Key', cell: mono }),\n  helper.accessor('description', { header: 'Description' }),\n  helper.accessor('created_at', { header: 'Created At', cell: datetime }),\n  helper.accessor('updated_at', { header: 'Updated At', cell: datetime }),\n  helper.display({\n    header: 'Actions',\n    cell: ({ row }) => (\n      <span className=\"flex gap-2 items-center\">\n        <DeleteButton apiKey={row.original} />\n      </span>\n    ),\n  }),\n] as ColumnDef<ApiKey>[];\n\nexport default function ChatEnginesPage () {\n  const [recentlyCreated, setRecentlyCreated] = 
useState<CreateApiKeyResponse>();\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'API Keys' },\n        ]}\n      />\n      {recentlyCreated && (\n        <Alert className=\"max-w-screen-sm\" variant=\"success\">\n          <CircleCheckIcon />\n          <AlertTitle>API Key created</AlertTitle>\n          <AlertDescription>\n            Please note that your API key will only be shown once. Make sure to save it in a secure location as it won&#39;t be displayed again. Not storing your key safely may result in you needing to generate a new API key.\n          </AlertDescription>\n          <div className=\"my-2\">\n            <p className=\"px-1 py-0.5 rounded bg-accent text-xs flex items-center\">\n              <CopyButton text={recentlyCreated.api_key} autoCopy />\n              <code className=\"text-accent-foreground\">{recentlyCreated.api_key}</code>\n            </p>\n          </div>\n        </Alert>\n      )}\n      <DataTableRemote\n        before={(\n          <ManagedDialog>\n            <DialogTrigger asChild>\n              <Button className=\"ml-auto flex\">\n                Create\n                <PlusIcon className=\"size-4 ml-1\" />\n              </Button>\n            </DialogTrigger>\n            <DialogContent>\n              <DialogHeader>\n                <DialogTitle>Create API Key</DialogTitle>\n              </DialogHeader>\n              <DataTableConsumer>\n                {(table) => (\n                  <ManagedDialogClose>\n                    {close => (\n                      <CreateApiKeyForm\n                        onCreated={data => {\n                          close();\n                          setRecentlyCreated(data);\n                          table?.reload?.();\n                        }}\n                      />\n                    )}\n                  </ManagedDialogClose>\n                )}\n              </DataTableConsumer>\n            </DialogContent>\n          
</ManagedDialog>\n        )}\n        columns={columns}\n        apiKey=\"api.api-keys.list\"\n        api={listApiKeys}\n        idColumn=\"id\"\n      />\n    </>\n  );\n}\n\nfunction DeleteButton ({ apiKey }: { apiKey: ApiKey }) {\n  const { reload } = useDataTable();\n\n  return (\n    <DangerousActionButton\n      action={async () => {\n        await deleteApiKey(apiKey.id);\n        reload?.();\n      }}\n      variant=\"ghost\"\n      className=\"text-xs text-destructive hover:text-destructive hover:bg-destructive/20\"\n    >\n      <TrashIcon className=\"w-3 mr-1\" />\n      Delete\n    </DangerousActionButton>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(user)/c/page.tsx",
    "content": "import { AdminPageHeading } from '@/components/admin-page-heading';\nimport { ChatsTable } from '@/components/chat/chats-table';\nimport { requireAuth } from '@/lib/auth';\n\nexport default async function ConversationsListPage () {\n  await requireAuth();\n\n  return (\n    <>\n      <AdminPageHeading\n        breadcrumbs={[\n          { title: 'Conversations History' },\n        ]}\n      />\n      <ChatsTable />\n    </>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/(user)/layout.tsx",
    "content": "import { AdminPageLayout } from '@/components/admin-page-layout';\nimport { requireAuth } from '@/lib/auth';\nimport { type ReactNode } from 'react';\n\nexport default async function Layout ({ children }: { children: ReactNode }) {\n  await requireAuth();\n  return (\n    <AdminPageLayout>\n      {children}\n    </AdminPageLayout>\n  );\n}"
  },
  {
    "path": "frontend/app/src/app/(main)/c/[id]/page.tsx",
    "content": "import { type Chat, type ChatMessage, getChat } from '@/api/chats';\nimport { AutoScroll, ManualScrollVoter } from '@/components/auto-scroll';\nimport { Conversation } from '@/components/chat/conversation';\nimport { ErrorCard } from '@/components/error-card';\nimport { Button } from '@/components/ui/button';\nimport { auth } from '@/lib/auth';\nimport { isServerError } from '@/lib/request';\nimport type { Metadata } from 'next';\nimport { cookies } from 'next/headers';\nimport Link from 'next/link';\nimport { notFound } from 'next/navigation';\nimport { cache } from 'react';\n\nconst cachedGetChat = cache((id: string) => getChat(id)\n  .then(res => {\n    return res;\n  })\n  .catch(error => {\n    if (isServerError(error, [404, 422 /* handle not UUID */])) {\n      notFound();\n    } else {\n      return Promise.reject(error);\n    }\n  }));\n\nexport default async function ChatDetailPage(props: { params: Promise<{ id: string }> }) {\n  const params = await props.params;\n  const id = params.id;\n  const me = await auth();\n  const bid = (await cookies()).get('bid')?.value;\n\n  let chat: Chat | undefined;\n  let messages: ChatMessage[];\n\n  try {\n    const detail = await cachedGetChat(id);\n    chat = detail.chat;\n    messages = detail.messages;\n  } catch (error) {\n    if (isServerError(error, 403)) {\n      return (\n        <div className=\"h-screen flex items-center justify-center xl:pr-side bg-accent\">\n          <ErrorCard\n            title=\"Access denied\"\n            message=\"This chat is private\"\n          >\n            <div className=\"flex gap-2 items-center mt-8\">\n              {!me && (\n                <Button asChild>\n                  <Link href=\"/auth/login\">\n                    Login to continue\n                  </Link>\n                </Button>\n              )}\n              <Button variant=\"ghost\" asChild>\n                <Link href=\"/\">\n                  Back to homepage\n                </Link>\n 
             </Button>\n            </div>\n          </ErrorCard>\n        </div>\n      );\n    }\n    throw error;\n  }\n\n  const shouldOpen = me\n    ? me.id === chat?.user_id\n    : bid === chat?.browser_id;\n\n  return (\n    <div className=\"xl:pr-side\">\n      <AutoScroll edgePixels={10}>\n        <ManualScrollVoter />\n        <Conversation\n          key={chat?.id}\n          chatId={id}\n          open={shouldOpen}\n          chat={chat}\n          history={messages}\n        />\n      </AutoScroll>\n    </div>\n  );\n}\n\nexport async function generateMetadata(props: { params: Promise<{ id: string }> }): Promise<Metadata> {\n  const params = await props.params;\n  try {\n    const chat = await cachedGetChat(params.id);\n\n    return {\n      title: chat.chat.title,\n    };\n  } catch (error) {\n    if (isServerError(error, 403)) {\n      return {};\n    } else {\n      throw error;\n    }\n  }\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/app/(main)/layout.tsx",
    "content": "'use client';\n\nimport { SiteSidebar } from '@/app/(main)/nav';\nimport { SiteHeaderLargeScreen, SiteHeaderSmallScreen } from '@/components/site-header';\nimport { SidebarProvider, SidebarTrigger } from '@/components/ui/sidebar';\nimport { useSettingContext } from '@/components/website-setting-provider';\nimport { cn } from '@/lib/utils';\nimport { ReactNode, useState } from 'react';\n\nexport default function Layout ({ children }: {\n  children: ReactNode\n}) {\n  const [sidebarOpen, setSidebarOpen] = useState(true);\n  const setting = useSettingContext();\n\n  return (\n    <>\n      <SiteHeaderSmallScreen setting={setting} />\n      <SidebarProvider open={sidebarOpen} onOpenChange={setSidebarOpen}>\n        <div className={cn('hidden md:block absolute pl-2.5 top-2.5 md:top-5 md:pl-5 z-10 transition-all ease-linear', sidebarOpen ? 'left-[--sidebar-width]' : 'left-0')}>\n          <SidebarTrigger />\n        </div>\n        <SiteHeaderLargeScreen setting={setting} />\n        <SiteSidebar setting={setting} />\n        <main className=\"flex-1 overflow-x-hidden\">\n          {children}\n        </main>\n      </SidebarProvider>\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/nav.tsx",
    "content": "'use client';\n\nimport { logout } from '@/api/auth';\nimport type { PublicWebsiteSettings } from '@/api/site-settings';\nimport { useAuth } from '@/components/auth/AuthProvider';\nimport { Branding } from '@/components/branding';\nimport { useAllChatEngines } from '@/components/chat-engine/hooks';\nimport { ChatNewDialog } from '@/components/chat/chat-new-dialog';\nimport { ChatsHistory } from '@/components/chat/chats-history';\nimport { useAllKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { type NavGroup, SiteNav } from '@/components/site-nav';\nimport { useBootstrapStatus } from '@/components/system/BootstrapStatusProvider';\nimport { Avatar, AvatarFallback } from '@/components/ui/avatar';\nimport { Button } from '@/components/ui/button';\nimport { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuTrigger } from '@/components/ui/dropdown-menu';\nimport { Sidebar, SidebarContent, SidebarFooter, SidebarHeader } from '@/components/ui/sidebar';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { useHref } from '@/components/use-href';\nimport { ActivitySquareIcon, AlertTriangleIcon, BinaryIcon, BotMessageSquareIcon, BrainCircuitIcon, CircleDotIcon, CogIcon, ComponentIcon, FileLineChart, HomeIcon, KeyRoundIcon, LibraryBigIcon, LibraryIcon, LogInIcon, MessageCircleQuestionIcon, MessagesSquareIcon, ShuffleIcon } from 'lucide-react';\nimport NextLink from 'next/link';\nimport Link from 'next/link';\nimport { useRouter } from 'next/navigation';\nimport type { ReactNode } from 'react';\n\nexport function SiteSidebar ({ setting }: { setting: PublicWebsiteSettings }) {\n  return (\n    <Sidebar>\n      <SidebarHeader>\n        <Branding setting={setting} />\n      </SidebarHeader>\n      <SidebarContent>\n        <NavContent />\n      </SidebarContent>\n      <SidebarFooter>\n        <NavFooter />\n      
</SidebarFooter>\n    </Sidebar>\n  );\n}\n\nfunction NavContent () {\n  const { required, need_migration } = useBootstrapStatus();\n  const href = useHref();\n  const auth = useAuth();\n  const user = auth.me;\n  const isLoggedIn = !!user; // TODO: wait for server\n\n  const disableIfNotAuthenticated = !isLoggedIn ? <><Link className=\"font-semibold underline\" href={`/auth/login?callbackUrl=${encodeURIComponent(href)}`}>Login</Link> to continue</> : false;\n\n  const groups: NavGroup[] = [\n    {\n      items: [\n        { custom: true, key: 'new-chat', children: <ChatNewDialog /> },\n        { href: '/', title: 'Home', icon: HomeIcon, exact: true },\n        { href: '/c', title: 'Conversations', exact: true, icon: MessagesSquareIcon, disabled: disableIfNotAuthenticated },\n        { custom: true, key: 'history', children: <ChatsHistory /> },\n      ],\n    },\n  ];\n\n  if (user?.is_superuser) {\n    groups.push({\n      title: 'Admin',\n      items: [\n        { href: '/stats/trending', title: 'Dashboard', icon: ActivitySquareIcon },\n        {\n          href: '/knowledge-bases',\n          title: 'Knowledge Bases',\n          icon: LibraryBigIcon,\n          details: !required.knowledge_base\n            ? <NavWarningDetails>You need to configure at least one Knowledge Base.</NavWarningDetails>\n            : <KnowledgeBaseNavDetails />,\n        },\n        {\n          href: '/chat-engines',\n          title: 'Chat Engines',\n          icon: BotMessageSquareIcon,\n          details: !!need_migration.chat_engines_without_kb_configured?.length\n            ? <NavWarningDetails>\n              Some ChatEngine need to <a href=\"/releases/0.3.0#manual-migration\" className=\"underline\">configure KnowledgeBase</a>.\n            </NavWarningDetails>\n            : !required.default_chat_engine\n              ? 
<NavWarningDetails>You need to configure default Chat Engine.</NavWarningDetails>\n              : <ChatEnginesNavDetails />,\n        },\n        {\n          parent: true,\n          key: 'models',\n          title: 'Models',\n          icon: ComponentIcon,\n          details: (!required.default_llm || !required.default_embedding_model) && <NavWarningDetails />,\n          children: [\n            { href: '/llms', title: 'LLMs', icon: BrainCircuitIcon, details: !required.default_llm ? <NavWarningDetails>You need to configure at least one Default LLM.</NavWarningDetails> : undefined },\n            { href: '/embedding-models', title: 'Embedding Models', icon: BinaryIcon, details: !required.default_embedding_model && <NavWarningDetails>You need to configure at least one Default Embedding Model.</NavWarningDetails> },\n            { href: '/reranker-models', title: 'Reranker Models', icon: ShuffleIcon },\n          ],\n        },\n        { href: '/feedbacks', title: 'Feedbacks', icon: MessageCircleQuestionIcon },\n        {\n          parent: true,\n          key: 'evaluation',\n          title: 'Evaluation',\n          icon: FileLineChart,\n          children: [\n            { href: '/evaluation/tasks', title: 'Tasks', icon: CircleDotIcon },\n            { href: '/evaluation/datasets', title: 'Datasets', icon: LibraryIcon },\n          ],\n        },\n        { href: '/site-settings', title: 'Settings', icon: CogIcon },\n      ],\n      sectionProps: { className: 'mt-auto mb-0' },\n    });\n  }\n\n  if (user?.is_superuser) {\n    groups.push({\n      title: 'Account',\n      items: [\n        { href: '/api-keys', title: 'API Keys', icon: KeyRoundIcon },\n      ],\n    });\n  }\n\n  return (\n    <SiteNav groups={groups} />\n  );\n}\n\nfunction NavFooter () {\n  const href = useHref();\n  const user = useAuth().me;\n  const router = useRouter();\n\n  if (!user) {\n    return (\n      <Button variant=\"ghost\" asChild>\n        <NextLink 
href={`/auth/login?callbackUrl=${encodeURIComponent(href)}`} prefetch={false} className=\"items-center w-full gap-2\">\n          <LogInIcon size=\"1em\" />\n          Login\n        </NextLink>\n      </Button>\n    );\n  }\n  return (\n    <div className=\"flex items-center gap-2\">\n      <DropdownMenu>\n        <DropdownMenuTrigger>\n          <Avatar className=\"border dark:bg-primary bg-primary-foreground p-0.5 w-8 h-8\">\n            {/*{user.image && <AvatarImage src={user.image} />}*/}\n            <AvatarFallback className=\"text-xs\">\n              {/*{user.image ? <Skeleton className=\"w-full h-full\" /> : user.name}*/}\n              {user.email.slice(0, 2)}\n            </AvatarFallback>\n          </Avatar>\n        </DropdownMenuTrigger>\n        <DropdownMenuContent collisionPadding={8} side=\"top\">\n          <DropdownMenuItem onClick={() => {\n            logout().finally(() => {\n              router.refresh();\n            });\n          }}>\n            Sign out\n          </DropdownMenuItem>\n        </DropdownMenuContent>\n      </DropdownMenu>\n      <span className=\"text-sm font-semibold\">\n        {user.email}\n      </span>\n    </div>\n  );\n}\n\nfunction NavWarningDetails ({ children }: { children?: ReactNode }) {\n  if (!children) {\n    return <AlertTriangleIcon className=\"text-warning size-4\" />;\n  }\n  return (\n    <TooltipProvider>\n      <Tooltip>\n        <TooltipTrigger>\n          <AlertTriangleIcon className=\"text-warning size-4\" />\n        </TooltipTrigger>\n        <TooltipContent>\n          {children}\n        </TooltipContent>\n      </Tooltip>\n    </TooltipProvider>\n  );\n}\n\nfunction CountSpan ({ children }: { children?: ReactNode }) {\n  return <span className=\"text-xs opacity-50 font-normal inline-block mr-1\">{children}</span>;\n}\n\nfunction KnowledgeBaseNavDetails () {\n  const { data: knowledgeBases, isLoading } = useAllKnowledgeBases();\n\n  if (isLoading) {\n    return <Skeleton 
className=\"flex-shrink-0 w-6 h-4\" />;\n  }\n\n  return <CountSpan>{knowledgeBases?.length}</CountSpan>;\n}\n\nfunction ChatEnginesNavDetails () {\n  const { data, isLoading } = useAllChatEngines();\n\n  if (isLoading) {\n    return <Skeleton className=\"flex-shrink-0 w-6 h-4\" />;\n  }\n\n  return <CountSpan>{data?.length}</CountSpan>;\n}\n"
  },
  {
    "path": "frontend/app/src/app/(main)/page.tsx",
    "content": "'use client';\n\nimport { Ask } from '@/components/chat/ask';\nimport { useAsk } from '@/components/chat/use-ask';\nimport { withReCaptcha } from '@/components/security-setting-provider';\nimport { SystemWizardBanner } from '@/components/system/SystemWizardBanner';\nimport { Button } from '@/components/ui/button';\nimport DotPattern from '@/components/ui/dot-pattern';\nimport { useSettingContext } from '@/components/website-setting-provider';\nimport { cn } from '@/lib/utils';\nimport NextLink from 'next/link';\n\nconst security: { google_recaptcha_site_key: string, google_recaptcha: 'v3' | 'enterprise' | '' } | null = null;\n\nexport default function Page () {\n  const { loading, disabled, setEngine, ask, engine } = useAsk();\n  const { homepage_title, description, homepage_example_questions, homepage_footer_links } = useSettingContext();\n\n  return (\n    <div className=\"h-screen relative\">\n      <SystemWizardBanner />\n      <div className=\"lg:h-[calc(100%-var(--ask-referral-height))] h-2/3 p-4 lg:p-0 flex flex-col items-center justify-center gap-4 relative\">\n        <div className='absolute size-full pointer-events-none flex items-center justify-center'>\n          <DotPattern\n            className={cn(\n              '[mask-image:radial-gradient(300px_circle_at_center,white,transparent)]',\n            )}\n          />\n        </div>\n        <h1 className=\"text-2xl sm:text-4xl font-light text-center\">\n          {homepage_title || ''}\n        </h1>\n        <p className=\"font-light dark:text-gray-300 text-gray-500 mb-4 w-4/5 md:w-auto text-center\">\n          {description || ''}\n        </p>\n        <Ask className=\"z-0 px-4 w-full lg:w-2/3\" disabled={disabled} loading={loading} ask={ask} engine={engine} setEngine={setEngine} />\n        {homepage_example_questions && (<ul className=\"z-0 flex gap-2 flex-wrap px-4 w-full lg:w-2/3\">\n          {homepage_example_questions.map((item, index) => (\n            <li key={index}>\n   
           <Button\n                className=\"g-recaptcha font-normal text-xs\"\n                disabled={loading}\n                variant=\"secondary\"\n                size=\"sm\"\n                onClick={() => {\n                  withReCaptcha({\n                    action: 'ask',\n                    siteKey: security?.google_recaptcha_site_key || '',\n                    mode: security?.google_recaptcha,\n                  }, ({ token, action }) => {\n                    ask(item, {\n                      headers: {\n                        'X-Recaptcha-Token': token,\n                        'X-Recaptcha-Action': action,\n                      },\n                    });\n                  });\n                }}\n              >\n                {item}\n              </Button>\n            </li>\n          ))}\n        </ul>)}\n      </div>\n      <div className=\"lg:h-[var(--ask-referral-height)] h-1/3 flex lg:justify-center justify-end items-center gap-4 lg:flex-row flex-col pb-4 lg:pb-0\" style={{ display: 'auto' }}>\n        {homepage_footer_links?.map(link => (\n          <NextLink key={link.text} href={link.href} target=\"_blank\" className={cn('font-light text-sm hover:underline opacity-50 flex justify-center', isHighlightedLinkText(link.text) && 'font-semibold text-yellow-500 dark:text-yellow-400 opacity-100 underline')}>\n            {trimHighlightedLinkText(link.text)}\n          </NextLink>\n        ))}\n      </div>\n    </div>\n  );\n}\n\nfunction isHighlightedLinkText (text: string) {\n  return text.startsWith('*') && text.endsWith('*')\n}\n\nfunction trimHighlightedLinkText (text: string) {\n  if (isHighlightedLinkText(text)) {\n    return text.slice(1, -1)\n  }\n  return text\n}\n"
  },
  {
    "path": "frontend/app/src/app/RootProviders.tsx",
    "content": "'use client';\n\nimport type { PublicWebsiteSettings } from '@/api/site-settings';\nimport type { BootstrapStatus } from '@/api/system';\nimport { getMe, type MeInfo } from '@/api/users';\nimport { AuthProvider } from '@/components/auth/AuthProvider';\nimport { ChatsProvider } from '@/components/chat/chat-hooks';\nimport { GtagProvider } from '@/components/gtag-provider';\nimport { BootstrapStatusProvider } from '@/components/system/BootstrapStatusProvider';\nimport { Toaster } from '@/components/ui/sonner';\nimport { SettingProvider } from '@/components/website-setting-provider';\nimport { type ExperimentalFeatures, ExperimentalFeaturesProvider } from '@/experimental/experimental-features-provider';\nimport { cn } from '@/lib/utils';\nimport { ThemeProvider } from 'next-themes';\nimport type { ReactNode } from 'react';\nimport useSWR from 'swr';\n\nexport interface RootProvidersProps {\n  me: MeInfo | undefined;\n  children: ReactNode;\n  settings: PublicWebsiteSettings;\n  bootstrapStatus: BootstrapStatus;\n  experimentalFeatures: Partial<ExperimentalFeatures>;\n}\n\nexport function RootProviders ({ me, settings, bootstrapStatus, experimentalFeatures, children }: RootProvidersProps) {\n  const { data, isValidating, isLoading, mutate } = useSWR('api.users.me', getMe, {\n    fallbackData: me,\n    revalidateOnMount: false,\n    revalidateOnFocus: false,\n    errorRetryCount: 0,\n  });\n\n  return (\n    <BootstrapStatusProvider bootstrapStatus={bootstrapStatus}>\n      <ThemeProvider\n        attribute=\"class\"\n        defaultTheme=\"dark\"\n        enableSystem\n        disableTransitionOnChange\n      >\n        <SettingProvider\n          value={settings}>\n          <ExperimentalFeaturesProvider features={experimentalFeatures}>\n            <GtagProvider gtagId={settings.ga_id} configured>\n              <AuthProvider me={data} isLoading={isLoading} isValidating={isValidating} reload={() => mutate(data, { revalidate: true })}>\n                
<ChatsProvider>\n                  {children}\n                  <Toaster cn={cn} />\n                </ChatsProvider>\n              </AuthProvider>\n            </GtagProvider>\n          </ExperimentalFeaturesProvider>\n        </SettingProvider>\n      </ThemeProvider>\n    </BootstrapStatusProvider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/api/[[...fallback_placeholder]]/route.ts",
    "content": "import { BASE_URL } from '@/lib/request';\nimport type { NextRequest } from 'next/server';\n\ndeclare global {\n  interface RequestInit {\n    duplex?: 'half' | boolean;\n  }\n}\n\nfunction handler (request: NextRequest) {\n  const base = BASE_URL;\n\n  if (!/^https?:\\/\\//.test(base)) {\n    return Promise.reject(new Error(`BASE_URL must be a http(s) url to proxy requests.`));\n  }\n\n  const newUrl = new URL(originalUrl(request), base);\n\n  const requestHeaders = new Headers(request.headers);\n  requestHeaders.delete('Accept-Encoding');\n  requestHeaders.delete('Host');\n  requestHeaders.delete('X-Invoke-Output');\n  requestHeaders.delete('X-Invoke-Path');\n  requestHeaders.delete('X-Invoke-Query');\n  requestHeaders.delete('X-Middleware-Invoke');\n\n  return fetch(newUrl, {\n    cache: 'no-cache',\n    method: request.method,\n    headers: requestHeaders,\n    body: request.body,\n    duplex: request.body instanceof ReadableStream ? 'half' : 'half',\n  }).then(response => {\n    console.log('[proxy]', request.method, newUrl.toString(), response.status, response.statusText, response.headers.get('Content-Type'));\n    return response;\n  }, error => {\n    console.error('[proxy]', request.method, newUrl.toString(), error);\n    return Promise.reject(error);\n  });\n}\n\nfunction originalUrl (request: NextRequest) {\n  const url = request.nextUrl;\n  const usp = new URLSearchParams(url.searchParams);\n  usp.delete('fallback_placeholder');\n  const search = usp.toString();\n  if (search) {\n    return url.pathname + '?' + search;\n  } else {\n    return url.pathname;\n  }\n}\n\nexport { handler as GET, handler as POST, handler as DELETE, handler as HEAD, handler as PUT, handler as PATCH, handler as OPTIONS };\n\nexport const runtime = 'edge';\n\nexport const dynamic = 'force-dynamic';\n\nexport const maxDuration = 300;\n"
  },
  {
    "path": "frontend/app/src/app/auth/login/page.tsx",
    "content": "import { Signin } from '@/components/signin';\nimport { headers } from 'next/headers';\n\nexport default async function Page() {\n  const referer = (await headers()).get('Referer') ?? undefined;\n\n  return (\n    <div className=\"w-screen h-screen flex items-center justify-center bg-background\">\n      <div className=\"min-w-80 border rounded-lg p-4 bg-card space-y-4\">\n        <h1>\n          Login\n        </h1>\n        <Signin callbackUrl={referer} />\n      </div>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/app/chart-theme.css",
    "content": ":root {\n  --chart-1: 12 76% 61%;\n  --chart-2: 173 58% 39%;\n  --chart-3: 197 37% 24%;\n  --chart-4: 43 74% 66%;\n  --chart-5: 27 87% 67%;\n}\n\n.dark {\n  --chart-1: 220 70% 50%;\n  --chart-2: 160 60% 45%;\n  --chart-3: 30 80% 55%;\n  --chart-4: 280 65% 60%;\n  --chart-5: 340 75% 55%;\n}\n\n"
  },
  {
    "path": "frontend/app/src/app/globals.css",
    "content": "@tailwind base;\n@tailwind components;\n@tailwind utilities;\n\n@layer base {\n  :root {\n    --header-height: 3rem;\n    --body-height: calc(100vh - var(--header-height));\n    --body-padding: 1rem;\n    --content-height: calc(var(--body-height) - var(--body-padding) * 6);\n    --sidebar-width: ;\n    --content-width: calc(100vw - var(--sidebar-width));\n    --ask-referral-height: 4rem;\n    --sidebar-background: 0 0% 98%;\n    --sidebar-foreground: 240 5.3% 26.1%;\n    --sidebar-primary: 240 5.9% 10%;\n    --sidebar-primary-foreground: 0 0% 98%;\n    --sidebar-accent: 240 4.8% 95.9%;\n    --sidebar-accent-foreground: 240 5.9% 10%;\n    --sidebar-border: 220 13% 91%;\n    --sidebar-ring: 217.2 91.2% 59.8%;\n  }\n\n  :root {\n    --background: 0 0% 100%;\n    --foreground: 240 10% 3.9%;\n\n    --card: 0 0% 100%;\n    --card-foreground: 240 10% 3.9%;\n\n    --popover: 0 0% 100%;\n    --popover-foreground: 240 10% 3.9%;\n\n    --primary: 240 5.9% 10%;\n    --primary-foreground: 0 0% 98%;\n\n    --secondary: 240 4.8% 95.9%;\n    --secondary-foreground: 240 5.9% 10%;\n\n    --muted: 240 4.8% 95.9%;\n    --muted-foreground: 240 3.8% 46.1%;\n\n    --accent: 240 4.8% 95.9%;\n    --accent-foreground: 240 5.9% 10%;\n\n    --destructive: 0 84.2% 60.2%;\n    --destructive-foreground: 0 0% 98%;\n\n    --border: 240 5.9% 90%;\n    --input: 240 5.9% 90%;\n    --ring: 240 10% 3.9%;\n\n    --radius: 0.5rem;\n  }\n\n  .dark {\n    --background: 240 10% 3.9%;\n    --foreground: 0 0% 98%;\n\n    --card: 240 10% 3.9%;\n    --card-foreground: 0 0% 98%;\n\n    --popover: 240 10% 3.9%;\n    --popover-foreground: 0 0% 98%;\n\n    --primary: 0 0% 98%;\n    --primary-foreground: 240 5.9% 10%;\n\n    --secondary: 240 3.7% 15.9%;\n    --secondary-foreground: 0 0% 98%;\n\n    --muted: 240 3.7% 15.9%;\n    --muted-foreground: 240 5% 64.9%;\n\n    --accent: 240 3.7% 15.9%;\n    --accent-foreground: 0 0% 98%;\n\n    --destructive: 0 72.2% 50.6%;\n    --destructive-foreground: 0 0% 
98%;\n\n    --border: 240 3.7% 15.9%;\n    --input: 240 3.7% 15.9%;\n    --ring: 240 4.9% 83.9%;\n    --sidebar-background: 240 5.9% 10%;\n    --sidebar-foreground: 240 4.8% 95.9%;\n    --sidebar-primary: 224.3 76.3% 48%;\n    --sidebar-primary-foreground: 0 0% 100%;\n    --sidebar-accent: 240 3.7% 15.9%;\n    --sidebar-accent-foreground: 240 4.8% 95.9%;\n    --sidebar-border: 240 3.7% 15.9%;\n    --sidebar-ring: 217.2 91.2% 59.8%;\n  }\n}\n\n@layer base {\n  * {\n    @apply border-border;\n  }\n\n  body {\n    @apply bg-background text-foreground;\n  }\n}\n\n@layer base {\n  :root {\n    --brand1: 221.2 83.2% 53.3%;\n    --brand1-foreground: 210 40% 98%;\n  }\n\n  .dark {\n    --brand1: 217.2 91.2% 59.8%;\n    --brand1-foreground: 222.2 47.4% 11.2%;\n  }\n}\n\n@layer base {\n  :root {\n    --warning: 37.7 92.1% 50.2%;\n    --warning-foreground: 0 0% 98%;\n    --info: 198.6 88.7% 48.4%;\n    --info-foreground: 0 0% 98%;\n    --success: 142.1 70.6% 45.3%;\n    --success-foreground: 0 0% 98%;\n  }\n\n  .dark {\n    --warning: 32.1 94.6% 43.7%;\n    --warning-foreground: 0 0% 98%;\n    --info: 200.4 98% 39.4%;\n    --info-foreground: 0 0% 98%;\n    --success: 142.1 76.2% 36.3%;\n    --success-foreground: 0 0% 98%;\n  }\n}\n\n.grecaptcha-badge {\n  visibility: hidden;\n}\n"
  },
  {
    "path": "frontend/app/src/app/layout.tsx",
    "content": "import { getPublicSiteSettings } from '@/api/site-settings';\nimport { getBootstrapStatus } from '@/api/system';\nimport { RootProviders } from '@/app/RootProviders';\nimport { experimentalFeatures } from '@/experimental/experimental-features';\nimport { auth } from '@/lib/auth';\nimport { GoogleAnalytics } from '@next/third-parties/google';\nimport type { Metadata } from 'next';\nimport { Inter } from 'next/font/google';\nimport Script from 'next/script';\nimport { cache, type ReactNode } from 'react';\n\nimport './globals.css';\nimport './chart-theme.css';\n\nconst inter = Inter({ subsets: ['latin'] });\n\nconst cachedGetSettings = cache(getPublicSiteSettings);\n\nexport async function generateMetadata (): Promise<Metadata> {\n  const { title, description } = await cachedGetSettings();\n  return {\n    title,\n    description,\n    icons: '/favicon.svg',\n  };\n};\n\nexport default async function RootLayout ({\n  children,\n}: Readonly<{\n  children: ReactNode;\n}>) {\n  const [\n    me,\n    settings,\n    bootstrapStatus,\n  ] = await Promise.all([\n    auth(),\n    cachedGetSettings(),\n    getBootstrapStatus(),\n  ]);\n\n  const _experimentalFeatures = experimentalFeatures();\n\n  if (!settings.enable_post_verifications) {\n    _experimentalFeatures.enable_message_post_verification = false;\n  }\n\n  return (\n    <html lang=\"en\" suppressHydrationWarning>\n    <body className={inter.className}>\n    <RootProviders me={me} settings={settings} bootstrapStatus={bootstrapStatus} experimentalFeatures={_experimentalFeatures}>\n      {children}\n    </RootProviders>\n    {settings.ga_id && <GoogleAnalytics gaId={settings.ga_id} />}\n    <Script async src=\"/widget.js\" data-is-main-site=\"true\" />\n    </body>\n    </html>\n  );\n}\n\nexport const dynamic = 'force-dynamic';\n"
  },
  {
    "path": "frontend/app/src/components/admin-page-heading.tsx",
    "content": "'use client';\n\nimport { Breadcrumb, BreadcrumbItem, BreadcrumbLink, BreadcrumbList, BreadcrumbPage, BreadcrumbSeparator } from '@/components/ui/breadcrumb';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { useSettingContext } from '@/components/website-setting-provider';\nimport { AlertCircleIcon, AlertTriangleIcon, CheckCircleIcon, HelpCircleIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { Fragment, type ReactNode } from 'react';\n\nexport interface BreadcrumbItem {\n  title: ReactNode;\n  url?: string;\n  docsUrl?: string;\n  alert?: {\n    variant: 'success' | 'warning' | 'destructive';\n    content: ReactNode;\n  };\n}\n\nexport interface TableHeadingProps {\n  breadcrumbs?: BreadcrumbItem[];\n}\n\nexport function AdminPageHeading ({ breadcrumbs }: TableHeadingProps) {\n  const { title: siteTitle } = useSettingContext();\n  return (\n    <div className=\"mb-2 pl-8\">\n      {breadcrumbs && (\n        <Breadcrumb>\n          <BreadcrumbList>\n            <BreadcrumbLink asChild>\n              <Link href=\"/\">\n                {siteTitle}\n              </Link>\n            </BreadcrumbLink>\n            {breadcrumbs.map((item, index) => (\n              <Fragment key={index}>\n                <BreadcrumbSeparator />\n                <BreadcrumbItem>\n                  {item.alert && <TooltipProvider>\n                    <Tooltip>\n                      <TooltipTrigger asChild>\n                        {item.alert.variant === 'success'\n                          ? <CheckCircleIcon className=\"text-success size-4\" />\n                          : item.alert.variant === 'warning'\n                            ? <AlertTriangleIcon className=\"text-warning size-4\" />\n                            : item.alert.variant === 'destructive'\n                              ? 
<AlertTriangleIcon className=\"text-destructive size-4\" />\n                              : <AlertCircleIcon className=\"text-muted-foreground size-4\" />}\n                      </TooltipTrigger>\n                      <TooltipContent align='start'>\n                        {item.alert.content}\n                      </TooltipContent>\n                    </Tooltip>\n                  </TooltipProvider>}\n                  {item.url\n                    ? <BreadcrumbLink asChild><Link href={item.url}>{item.title}</Link></BreadcrumbLink>\n                    : index === breadcrumbs.length - 1\n                      ? <BreadcrumbPage>{item.title}</BreadcrumbPage>\n                      : <span>{item.title}</span>}\n                  {item.docsUrl\n                    ? <a href={item.docsUrl} target=\"_blank\"><HelpCircleIcon className=\"size-4\" /></a>\n                    : undefined}\n                </BreadcrumbItem>\n              </Fragment>\n            ))}\n          </BreadcrumbList>\n        </Breadcrumb>\n      )}\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/admin-page-layout.tsx",
    "content": "import type { ReactNode } from 'react';\n\nexport function AdminPageLayout ({ children }: { children: ReactNode }) {\n  return (\n    <div className=\"p-6 space-y-6 h-[calc(100%-2rem)]\">\n      {children}\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/api-keys/CreateApiKeyForm.tsx",
    "content": "import { createApiKey, type CreateApiKeyResponse } from '@/api/api-keys';\nimport { FormInput } from '@/components/form/control-widget';\nimport { withCreateEntityForm } from '@/components/form/create-entity-form';\nimport { z } from 'zod';\n\nconst schema = z.object({\n  description: z.string(),\n});\n\nexport interface CreateApiKeyFormProps {\n  onCreated?: (data: CreateApiKeyResponse) => void;\n}\n\nconst FormImpl = withCreateEntityForm(schema, createApiKey, {\n  submitTitle: 'Create API Key',\n  submittingTitle: 'Creating API Key...',\n});\n\nexport function CreateApiKeyForm ({ onCreated }: CreateApiKeyFormProps) {\n  return (\n    <FormImpl onCreated={onCreated}>\n      <FormImpl.Basic name=\"description\" label=\"API Key Description\">\n        <FormInput />\n      </FormImpl.Basic>\n    </FormImpl>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/auth/AuthProvider.tsx",
    "content": "import type { MeInfo } from '@/api/users';\nimport { createContext, type ReactNode, useContext } from 'react';\n\nexport interface AuthContextValues {\n  me: MeInfo | undefined;\n  isLoading: boolean;\n  isValidating: boolean;\n  reload: () => void;\n}\n\nconst AuthContext = createContext<AuthContextValues>({ me: undefined, isLoading: false, isValidating: false, reload: () => {}, });\n\nexport function AuthProvider ({ children, ...context }: AuthContextValues & { children: ReactNode }) {\n  return (\n    <AuthContext.Provider value={context}>\n      {children}\n    </AuthContext.Provider>\n  );\n}\n\nexport function useAuth () {\n  return useContext(AuthContext);\n}\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/auto-scroll.stories.tsx",
    "content": "import type { Meta, StoryObj } from '@storybook/react';\nimport { type RefObject, useEffect, useRef, useState } from 'react';\nimport { AutoScroll } from './auto-scroll';\nimport { ManualScrollVoter } from './manual-scroll-voter';\nimport { useRequestScroll } from './use-request-scroll';\n\nconst meta = {\n  title: 'Components/AutoScroll',\n  subcomponents: {},\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  argTypes: {},\n  args: {},\n} satisfies Meta<any>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Container: Story = {\n  args: {},\n  render: () => {\n    const [count, setCount] = useState(0);\n    const [target, setTarget] = useState<HTMLDivElement | null>(null);\n\n    useEffect(() => {\n      const interval = setInterval(() => {\n        setCount(count => count + 1);\n      }, 1000);\n\n      return () => {\n        clearInterval(interval);\n      };\n    }, []);\n\n    return (\n      <div ref={setTarget} style={{ minWidth: 250, height: 500, overflow: 'scroll' }}>\n        <AutoScroll target={target}>\n          <ManualScrollVoter />\n          <AutoScrollByObservingChildren target={{ current: target }} />\n          {Array(count).fill(0).map((_, i) => (\n            <div key={i} style={{ margin: 20, padding: 20, height: 200, background: '#c00000' }}></div>\n          ))}\n        </AutoScroll>\n      </div>\n    );\n  },\n};\n\nexport const Document: Story = {\n  args: {},\n  render: () => {\n    const [count, setCount] = useState(0);\n    const ref = useRef<HTMLDivElement>(null);\n\n    useEffect(() => {\n      const interval = setInterval(() => {\n        setCount(count => count + 1);\n      }, 1000);\n\n      return () => {\n        clearInterval(interval);\n      };\n    }, []);\n\n    return (\n      <AutoScroll>\n        <ManualScrollVoter />\n        <AutoScrollByObservingChildren target={ref} />\n        <div ref={ref}>\n          {Array(count).fill(0).map((_, i) => (\n      
      <div key={i} style={{ margin: 20, padding: 20, height: 200, background: '#c00000' }}></div>\n          ))}\n        </div>\n      </AutoScroll>\n    );\n  },\n};\n\nfunction AutoScrollByObservingChildren ({ target }: { target: RefObject<HTMLElement | null> }) {\n  const requestScroll = useRequestScroll();\n\n  useEffect(() => {\n    const el = target.current;\n    if (el) {\n\n      const mo = new MutationObserver(() => {\n        requestScroll('bottom');\n      });\n\n      mo.observe(el, { childList: true });\n\n      return () => {\n        mo.disconnect();\n      };\n    }\n  }, [target.current]);\n\n  return null;\n}\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/auto-scroll.tsx",
    "content": "'use client';\n\nimport { type MutableRefObject, type ReactNode, useCallback, useEffect, useRef, useState } from 'react';\nimport { AutoScrollContext, ScrollEdge, ScrollHandler } from './context';\n\nexport interface AutoScrollProps {\n  /**\n   * undefined - Window\n   * null - no scroll target\n   */\n  target?: HTMLElement | null;\n  children: ReactNode;\n  edgePixels?: number;\n}\n\nexport function AutoScroll ({ target, edgePixels = 0, children }: AutoScrollProps) {\n  const [map, setMap] = useState<Map<string, boolean>>(() => new Map());\n  const [handlers, setHandlers] = useState<MutableRefObject<ScrollHandler>[]>([]);\n  const lastScrollPositionInfo = useRef<{ left: number, top: number }>({ left: 0, top: 0 });\n  const targetRef = useRef(target);\n\n  const currentHandlers = useRef(handlers);\n  useEffect(() => {\n    currentHandlers.current = handlers;\n    targetRef.current = target;\n  });\n\n  const registerVoter = useCallback((id: string, defaultScroll: boolean) => {\n    setMap(map => new Map(map).set(id, defaultScroll));\n  }, []);\n\n  const unregisterVoter = useCallback((id: string) => {\n    setMap(map => {\n      map = new Map(map);\n      map.delete(id);\n      return map;\n    });\n  }, []);\n\n  const voteAutoScroll = useCallback((id: string) => {\n    setMap(map => {\n      if (map.get(id)) {\n        return map;\n      }\n      return new Map(map).set(id, true);\n    });\n  }, []);\n\n  const cancelVoteAutoScroll = useCallback((id: string) => {\n    setMap(map => {\n      if (map.get(id) === false) {\n        return map;\n      }\n      return new Map(map).set(id, false);\n    });\n  }, []);\n\n  const useScroll = useCallback(function useScroll (handler: ScrollHandler) {\n    const currentRef = useRef(handler);\n\n    useEffect(() => {\n      currentRef.current = handler;\n    });\n\n    useEffect(() => {\n      setHandlers(handlers => [...handlers, currentRef]);\n\n      return () => {\n        setHandlers(handlers => 
handlers.filter(handler => handler !== currentRef));\n      };\n    }, []);\n    return;\n  }, []);\n\n  useEffect(() => {\n    if (target === null) {\n      return;\n    }\n\n    const scrollTarget = target ?? window;\n    if (scrollTarget) {\n      const measureTarget = scrollTarget === window ? document.documentElement : scrollTarget;\n      const eventTarget = scrollTarget;\n\n      lastScrollPositionInfo.current = {\n        left: measureTarget.scrollLeft,\n        top: measureTarget.scrollTop,\n      };\n\n      const getSize = () => {\n        if (scrollTarget === window) {\n          return {\n            width: window.innerWidth,\n            height: window.innerHeight,\n          };\n        } else {\n          return measureTarget.getBoundingClientRect();\n        }\n      };\n\n      const handleScroll = (event: Event) => {\n        const { scrollLeft, scrollTop, scrollHeight, scrollWidth } = measureTarget;\n        const { width, height } = getSize();\n\n        const x = scrollLeft - lastScrollPositionInfo.current.left;\n        const y = scrollTop - lastScrollPositionInfo.current.top;\n\n        const reachLeft = scrollLeft <= edgePixels;\n        const reachRight = scrollLeft + width >= scrollWidth - edgePixels;\n        const reachTop = scrollTop <= edgePixels;\n        const reachBottom = scrollTop + height >= scrollHeight - edgePixels;\n\n        lastScrollPositionInfo.current = {\n          left: scrollLeft,\n          top: scrollTop,\n        };\n\n        currentHandlers.current.forEach(ref => {\n          ref.current({\n            x, y, reachLeft, reachTop, reachRight, reachBottom,\n          });\n        });\n      };\n\n      eventTarget.addEventListener('scroll', handleScroll, { passive: true });\n\n      return () => {\n        eventTarget.removeEventListener('scroll', handleScroll);\n      };\n    }\n  }, [target, edgePixels]);\n\n  const votes = Array.from(map.values());\n  const shouldAutoScroll = votes.length > 0 && 
votes.indexOf(false) === -1;\n  const shouldAutoScrollRef = useRef(shouldAutoScroll);\n\n  useEffect(() => {\n    shouldAutoScrollRef.current = shouldAutoScroll;\n  });\n\n  const requestScroll = useCallback((edge: ScrollEdge) => {\n    const target = targetRef.current;\n\n    if (target === null) {\n      return;\n    }\n\n    const scrollTarget = target ?? document.documentElement;\n    if (shouldAutoScrollRef.current && scrollTarget) {\n      switch (edge) {\n        case 'top':\n          scrollTarget.scrollTo({ top: 0, behavior: 'smooth' });\n          break;\n        case 'left':\n          scrollTarget.scrollTo({ left: 0, behavior: 'smooth' });\n          break;\n        case 'bottom':\n          scrollTarget.scrollTo({ top: scrollTarget.scrollHeight, behavior: 'smooth' });\n          break;\n        case 'right':\n          scrollTarget.scrollTo({ left: scrollTarget.scrollWidth, behavior: 'smooth' });\n          break;\n      }\n    }\n  }, []);\n\n  return (\n    <AutoScrollContext.Provider\n      value={{\n        useScroll,\n        registerVoter,\n        unregisterVoter,\n        voteAutoScroll,\n        cancelVoteAutoScroll,\n        requestScroll,\n      }}>\n      {children}\n    </AutoScrollContext.Provider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/context.ts",
    "content": "'use client';\n\nimport { createContext } from 'react';\n\nexport type ScrollInfo = {\n  x: number\n  y: number\n  reachLeft: boolean\n  reachTop: boolean\n  reachRight: boolean\n  reachBottom: boolean\n}\nexport type ScrollEdge = 'top' | 'left' | 'right' | 'bottom';\nexport type ScrollHandler = (info: ScrollInfo) => void;\n\nexport interface AutoScrollContextValues {\n  registerVoter (id: string, defaultScroll: boolean): void;\n\n  unregisterVoter (id: string): void;\n\n  voteAutoScroll (id: string): void;\n\n  cancelVoteAutoScroll (id: string): void;\n\n  useScroll (handler: ScrollHandler): void;\n\n  requestScroll (edge: ScrollEdge): void;\n}\n\nexport const AutoScrollContext = createContext<AutoScrollContextValues>({\n  registerVoter (id: string, defaultScroll: boolean) {},\n  unregisterVoter (id: string) {},\n  voteAutoScroll (id: string) {},\n  cancelVoteAutoScroll (id: string) {},\n  useScroll () {},\n  requestScroll () {},\n});\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/index.ts",
    "content": "export * from './manual-scroll-voter';\nexport * from './auto-scroll';\nexport * from './use-auto-scroll-voter';\nexport * from './use-request-scroll';\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/manual-scroll-voter.tsx",
    "content": "'use client';\n\nimport { useAutoScrollVoter } from './use-auto-scroll-voter';\n\n/**\n * When scrolling up, stop auto scroll.\n *\n * When scrolled down to bottom, start auto scroll.\n */\nexport function ManualScrollVoter () {\n  const { useScroll, voteAutoScroll, cancelVoteAutoScroll } = useAutoScrollVoter(true);\n\n  useScroll((info) => {\n    const { y, reachBottom } = info;\n    if (y > 0 && reachBottom) {\n      voteAutoScroll();\n    } else if (y < 0) {\n      cancelVoteAutoScroll();\n    }\n  });\n\n  return null;\n}\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/use-auto-scroll-voter.ts",
    "content": "'use client';\n\nimport { AutoScrollContext } from './context';\nimport { useContext, useEffect, useId, useMemo } from 'react';\n\nexport function useAutoScrollVoter (defaultScroll: boolean) {\n  const id = useId();\n  const { useScroll, registerVoter, unregisterVoter, voteAutoScroll, cancelVoteAutoScroll } = useContext(AutoScrollContext);\n\n  useEffect(() => {\n    registerVoter(id, defaultScroll);\n    return () => {\n      unregisterVoter(id);\n    };\n  }, [id]);\n\n  const _voteAutoScroll = useMemo(() => {\n    return () => voteAutoScroll(id);\n  }, [id]);\n\n  const _cancelVoteAutoScroll = useMemo(() => {\n    return () => cancelVoteAutoScroll(id);\n  }, [id]);\n\n  return {\n    useScroll,\n    voteAutoScroll: _voteAutoScroll,\n    cancelVoteAutoScroll: _cancelVoteAutoScroll,\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/components/auto-scroll/use-request-scroll.ts",
    "content": "'use client';\n\nimport { useContext } from 'react';\nimport { AutoScrollContext } from './context';\n\nexport function useRequestScroll () {\n  const { requestScroll } = useContext(AutoScrollContext);\n\n  return requestScroll;\n}\n"
  },
  {
    "path": "frontend/app/src/components/branding.tsx",
    "content": "import type { PublicWebsiteSettings } from '@/api/site-settings';\nimport Link from 'next/link';\n\nexport function Branding ({ setting }: { setting: PublicWebsiteSettings }) {\n  return (\n    <Link className=\"flex items-center justify-start\" href='/'>\n      <img className=\"h-8 dark:hidden\" src={setting.logo_in_light_mode} alt=\"logo\" />\n      <img className=\"h-8 hidden dark:block\" src={setting.logo_in_dark_mode} alt=\"logo\" />\n    </Link>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/actions.tsx",
    "content": "import { DangerousActionButton, type DangerousActionButtonProps } from '@/components/dangerous-action-button';\nimport { buttonVariants } from '@/components/ui/button';\nimport { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuLabel, DropdownMenuSeparator, DropdownMenuTrigger } from '@/components/ui/dropdown-menu';\nimport { useDataTable } from '@/components/use-data-table';\nimport { cn } from '@/lib/utils';\nimport type { CellContext } from '@tanstack/react-table';\nimport { EllipsisIcon, Loader2Icon } from 'lucide-react';\nimport type { AppRouterInstance } from 'next/dist/shared/lib/app-router-context.shared-runtime';\nimport { useRouter } from 'next/navigation';\nimport { type Dispatch, type ReactNode, type SetStateAction, type TransitionStartFunction, useState, useTransition } from 'react';\n\nexport interface CellAction {\n  type?: 'button' | 'label' | 'separator';\n  key?: string | number;\n  icon?: ReactNode;\n  title?: ReactNode;\n  disabled?: boolean;\n  dangerous?: Pick<DangerousActionButtonProps, 'dialogDescription' | 'dialogTitle'>;\n  action?: (context: ActionUIContext) => Promise<void> | void;\n}\n\nexport interface ActionUIContext {\n  table: ReturnType<typeof useDataTable>;\n  startTransition: TransitionStartFunction;\n  router: AppRouterInstance;\n  dropdownOpen: boolean;\n  setDropdownOpen: Dispatch<SetStateAction<boolean>>;\n}\n\nexport function actions<Row> (items: (row: Row) => CellAction[]) {\n  // eslint-disable-next-line react/display-name\n  return (props: CellContext<Row, any>) => {\n    const [open, setOpen] = useState(false);\n    const actionItems = items(props.row.original);\n\n    return (\n      <DropdownMenu open={open} onOpenChange={setOpen}>\n        <DropdownMenuTrigger className={cn(buttonVariants({ variant: 'ghost', size: 'icon' }), 'text-muted-foreground p-1 size-6')} disabled={actionItems.length === 0}>\n          <EllipsisIcon className=\"size-4\" />\n        </DropdownMenuTrigger>\n        
<DropdownMenuContent align=\"end\">\n          {actionItems.map((item, index) => {\n            if (item.type === 'label') {\n              return <DropdownMenuLabel key={item.key ?? index}>{item.title}</DropdownMenuLabel>;\n            } else if (item.type === 'separator') {\n              return <DropdownMenuSeparator key={item.key ?? index} />;\n            } else {\n              return <Action key={item.key ?? index} item={item} open={open} setOpen={setOpen} />;\n            }\n          })}\n        </DropdownMenuContent>\n      </DropdownMenu>\n    );\n  };\n}\n\nfunction Action ({ item, open, setOpen }: { item: CellAction, open: boolean, setOpen: Dispatch<SetStateAction<boolean>> }) {\n  const table = useDataTable();\n  const [busy, setBusy] = useState(false);\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  const onAction = async () => {\n    try {\n      setBusy(true);\n      await item?.action?.({ startTransition, router, table, dropdownOpen: open, setDropdownOpen: setOpen });\n    } finally {\n      setBusy(false);\n    }\n  };\n\n  let el = (\n    <DropdownMenuItem\n      className={cn('gap-2 cursor-pointer disabled:cursor-not-allowed text-xs', item.dangerous && 'text-destructive focus:bg-destructive/10 focus:text-destructive')}\n      disabled={item.disabled || transitioning}\n      onSelect={item.dangerous\n        ? (event) => {\n          event.preventDefault();\n        }\n        : (event) => {\n          event.preventDefault();\n          void onAction();\n        }}\n    >\n      {item.icon ? (busy || transitioning) ? <Loader2Icon className=\"size-3 animate-spin repeat-infinite\" /> : item.icon : null}\n      {item.title}\n    </DropdownMenuItem>\n  );\n\n  if (item.dangerous) {\n    el = (\n      <DangerousActionButton {...item.dangerous} action={onAction} asChild>\n        {el}\n      </DangerousActionButton>\n    );\n  }\n\n  return el;\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/boolean.tsx",
    "content": "import type { CellContext } from '@tanstack/react-table';\nimport { CheckIcon, XIcon } from 'lucide-react';\n\nexport function boolean (props: CellContext<any, boolean | undefined | null>) {\n  const bool = props.getValue();\n\n  if (bool == null) {\n    return <span className=\"text-muted-foreground\">-</span>;\n  }\n\n  if (bool) {\n    return (\n      <span className=\"text-success inline-flex gap-1 items-center\">\n        <CheckIcon className=\"size-4\" />\n        Yes\n      </span>\n    );\n  } else {\n    return (\n      <span className=\"text-muted-foreground inline-flex gap-1 items-center\">\n        <XIcon className=\"size-4\" />\n        No\n      </span>\n    );\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/datetime.tsx",
    "content": "import type { CellContext } from '@tanstack/react-table';\nimport { format } from 'date-fns';\n\nexport function datetime (props: CellContext<any, Date | null | undefined>) {\n  const date = props.getValue();\n\n  if (!date) {\n    return '-';\n  }\n\n  if (Number.isNaN(date.getTime())) {\n    return 'Invalid Date';\n  }\n\n  return format(date, 'yyyy-MM-dd HH:mm:ss');\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/error-message.tsx",
    "content": "import { PythonViewer } from '@/components/py-viewer';\nimport { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport type { CellContext } from '@tanstack/react-table';\n\nexport function errorMessageCell<Row> (trimLength = 25) {\n  return function ErrorMessageCell (context: CellContext<Row, string | null | undefined>) {\n    return <AutoErrorMessagePopper trimLength={trimLength}>{context.getValue() ?? '-'}</AutoErrorMessagePopper>;\n  };\n}\n\nexport function AutoErrorMessagePopper ({ trimLength = 25, children }: { trimLength?: number, children: string | null }) {\n  if (!children || children.length <= trimLength) {\n    return children;\n  }\n\n  const shortcut = children.slice(0, trimLength);\n\n  return (\n    <Dialog>\n      <DialogTrigger>\n        {shortcut}{'... '}\n        <span className=\"text-muted-foreground\">\n          ({children.length + ' characters'})\n        </span>\n      </DialogTrigger>\n      <DialogContent className=\"max-w-screen-lg h-[80vh]\">\n        <DialogHeader>\n          <DialogTitle>\n            Error Message\n          </DialogTitle>\n          <DialogDescription className=\"sr-only\" />\n        </DialogHeader>\n        <div className=\"size-full overflow-scroll\">\n          <PythonViewer value={children} />\n        </div>\n      </DialogContent>\n    </Dialog>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/link.tsx",
    "content": "import type { CellContext } from '@tanstack/react-table';\nimport Link from 'next/link';\n\nexport interface LinkCellProps<Row> {\n  icon?: React.ReactNode;\n  url?: (row: Row) => string;\n  text?: (row: Row) => string;\n  truncate?: boolean;\n  truncate_length?: number;\n}\n\nconst format_link = (url: string, maxLength: number = 30): string => {\n  if (!url || url.length <= maxLength) return url;\n  const start = url.substring(0, maxLength / 2);\n  const end = url.substring(url.length - maxLength / 2);\n  return `${start}...${end}`;\n};\n\nexport function link<Row> ({ icon, url, text, truncate, truncate_length }: LinkCellProps<Row>) {\n  // eslint-disable-next-line react/display-name\n  return (context: CellContext<Row, any>) => {\n    const href_value = url ? url(context.row.original) : String(context.getValue());\n    const text_value = text ? text(context.row.original) : String(context.getValue());\n    const display_text = truncate ? format_link(text_value, truncate_length) : text_value;\n\n    return <Link\n      className=\"underline font-mono flex items-center gap-1\"\n      href={href_value}>\n      {icon} {display_text}\n    </Link>\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/metadata.tsx",
    "content": "import { ThemedStyle } from '@/components/themed-style';\nimport { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';\nimport type { CellContext } from '@tanstack/react-table';\nimport JsonView from '@uiw/react-json-view';\nimport { darkTheme } from '@uiw/react-json-view/dark';\nimport { lightTheme } from '@uiw/react-json-view/light';\nimport { AlignLeftIcon, BracesIcon, BracketsIcon } from 'lucide-react';\nimport type { ReactElement, ReactNode } from 'react';\n\nexport const metadataCell = (props: CellContext<any, any>) => {\n  const metadata = props.getValue();\n\n  if (metadata == null) {\n    return <pre className=\"text-xs\">(null)</pre>;\n  }\n\n  let icon: ReactElement | null;\n  let text: ReactNode;\n\n  if (typeof metadata === 'object') {\n    if (metadata instanceof Array) {\n      icon = <BracketsIcon className=\"w-4 h-4\" />;\n      text = <span className=\"text-muted-foreground\">{`${metadata.length} items`}</span>;\n    } else {\n      icon = <BracesIcon className=\"w-4 h-4\" />;\n      text = <span className=\"text-muted-foreground\">{`${Object.keys(metadata).length} keys`}</span>;\n    }\n  } else {\n    const stringValue = String(metadata);\n    if (stringValue.length < 25) {\n      return stringValue;\n    }\n    icon = <AlignLeftIcon className=\"w-4 h-4\" />;\n    text = <span className=\"text-muted-foreground\">{typeof metadata}</span>;\n  }\n\n  return (\n    <Popover modal>\n      <PopoverTrigger className=\"inline-flex gap-1 items-center\">\n        {icon} {text}\n      </PopoverTrigger>\n      <PopoverContent side=\"bottom\" className=\"w-96 max-h-72 overflow-auto scroll-smooth\">\n        <ThemedStyle dark={darkTheme} light={lightTheme}>\n          <JsonView value={metadata} collapsed={2} />\n        </ThemedStyle>\n      </PopoverContent>\n    </Popover>\n  );\n};\n"
  },
  {
    "path": "frontend/app/src/components/cells/mono.tsx",
    "content": "import type { CellContext } from '@tanstack/react-table';\n\nexport const mono = (cell: CellContext<any, any>) => <span className=\"font-mono\">{String(cell.getValue())}</span>;\n"
  },
  {
    "path": "frontend/app/src/components/cells/percent.tsx",
    "content": "import { cn } from '@/lib/utils';\nimport * as ProgressPrimitive from '@radix-ui/react-progress';\nimport type { CellContext } from '@tanstack/react-table';\nimport * as React from 'react';\nimport { type CSSProperties } from 'react';\n\ninterface PercentCellConfig {\n  multiply?: number;\n  colorStops?: {\n    checkpoint: number\n    color: string\n  }[];\n}\n\nexport const percent = (\n  cell: CellContext<any, number | undefined | null>,\n  { multiply = 100, colorStops }: PercentCellConfig = {},\n) => (\n  <span className=\"flex items-center gap-1\">\n    <ProgressPrimitive.Root\n      className={cn(\n        'relative overflow-hidden rounded-full bg-secondary flex-shrink-0 w-16 h-2',\n      )}\n    >\n      <ProgressPrimitive.Indicator\n        className=\"h-full w-full flex-1 bg-primary transition-all\"\n        style={{\n          transform: `translateX(-${100 - ((cell.getValue() ?? 0) * multiply)}%)`,\n          ...getStyle(cell.getValue() ?? 0, colorStops),\n        }}\n      />\n    </ProgressPrimitive.Root>\n    <code className=\"text-xs\">\n      {cell.getValue()?.toFixed(2) ?? '-'}\n    </code>\n  </span>\n);\n\nfunction getStyle (value: number, stops: {\n  checkpoint: number\n  color: string\n}[] | undefined): CSSProperties {\n  if (!stops || stops.length === 0) {\n    return {};\n  }\n  let from = stops.findLast(stop => stop.checkpoint <= value)!;\n  let to = stops.find(stop => stop.checkpoint >= value)!;\n\n  if (!from || !to) {\n    return {};\n  }\n\n  if (from.checkpoint === to.checkpoint) {\n    return {\n      backgroundColor: from.color,\n    };\n  }\n\n  const p1 = (value - from.checkpoint) / (to.checkpoint - from.checkpoint) * 100;\n\n  return {\n    backgroundColor: `color-mix(in srgb, ${from.color} ${100 - p1}%, ${to.color} ${p1}%)`,\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/components/cells/reference.tsx",
    "content": "'use client';\n\nimport Link from 'next/link';\n\nexport function DatasourceCell ({ id, name }: { id: number, name: string }) {\n  return <span>{name}</span>;\n}\n\nexport function KnowledgeBaseCell ({ id, name }: { id?: number, name?: string }) {\n  if (id == null) {\n    return <span className=\"text-muted-foreground\">-</span>;\n  }\n  return <Link className=\"underline\" href={`/knowledge-bases/${id}`}>{name ?? 'Unnamed'}</Link>;\n}\n"
  },
  {
    "path": "frontend/app/src/components/charts/IndexProgressChart.stories.tsx",
    "content": "import { IndexProgressChart, IndexProgressChartPlaceholder } from '@/components/charts/IndexProgressChart';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport type { FC } from 'react';\n\nconst valueType = {\n  name: 'number',\n  required: false,\n} as const;\n\nconst meta = {\n  title: 'Components/Charts/IndexProgressChart',\n  component: IndexProgressChart,\n  subcomponents: {\n    IndexProgressChartPlaceholder: IndexProgressChartPlaceholder as FC<unknown>,\n  },\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ minWidth: 250 }}>\n        <Story />\n      </div>\n    ),\n  ],\n  argTypes: {},\n  args: {},\n} satisfies Meta<typeof IndexProgressChart>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Default: Story = {\n  args: {\n    title: 'Title',\n    description: 'Description',\n    label: 'Count',\n    data: {\n      failed: 23,\n      completed: 120,\n      not_started: 2,\n    },\n  },\n};\n\nexport const Placeholder: StoryObj<typeof IndexProgressChartPlaceholder> = {\n  args: {\n    title: 'Title',\n    description: 'Description',\n    label: 'Count',\n  },\n  render ({ ...args }) {\n    return <IndexProgressChartPlaceholder {...args} />;\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/charts/IndexProgressChart.tsx",
    "content": "'use client';\n\nimport type { IndexProgress } from '@/api/rag';\n\nimport { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';\nimport { ChartConfig, ChartContainer, ChartTooltip, ChartTooltipContent } from '@/components/ui/chart';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport * as React from 'react';\nimport { type ReactNode, useMemo } from 'react';\nimport { Label, Pie, PieChart } from 'recharts';\n\nconst color_error = '#ef4444';\nconst color_succeed = '#22c55e';\nconst color_in_progress = '#3b82f6';\nconst color_pending = '#71717a';\nconst color_blank = '#71717a80';\nconst color_placeholder = '#71717a40';\n\nconst chartConfig = {\n  total: {\n    label: 'Total',\n  },\n  completed: {\n    label: 'Completed',\n    color: color_succeed,\n  },\n  pending: {\n    label: 'Pending',\n    color: color_pending,\n  },\n  running: {\n    label: 'Running',\n    color: color_in_progress,\n  },\n  failed: {\n    label: 'Failed',\n    color: color_error,\n  },\n  not_started: {\n    label: 'Not Started',\n    color: color_blank,\n  },\n} satisfies ChartConfig;\n\nexport function IndexProgressChart ({ title, description, label, data }: { title: string, description?: string, label: ReactNode, data: IndexProgress }) {\n  const total = React.useMemo(() => {\n    return Object.values(data).reduce((a, b) => a + b, 0);\n  }, [data]);\n\n  const chartData = useMemo(() => {\n\n    return [\n      { count: data.completed, state: 'Completed', fill: color_succeed },\n      { count: data.failed, state: 'Failed', fill: color_error },\n      { count: data.pending, state: 'Pending', fill: color_pending },\n      { count: data.running, state: 'Running', fill: color_in_progress },\n      { count: data.not_started, state: 'Not started', fill: color_blank },\n    ];\n  }, [data]);\n\n  return (\n    <Card className=\"flex flex-col\">\n      <CardHeader className=\"items-center pb-0\">\n        <CardTitle>{title}</CardTitle>\n    
    {description && <CardDescription>{description}</CardDescription>}\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0\">\n        <ChartContainer\n          config={chartConfig}\n          className=\"mx-auto aspect-square max-h-[250px]\"\n        >\n          <PieChart>\n            <ChartTooltip\n              cursor={false}\n              content={<ChartTooltipContent hideLabel />}\n            />\n            <Pie\n              data={chartData}\n              dataKey=\"count\"\n              nameKey=\"state\"\n              innerRadius={60}\n              strokeWidth={5}\n            >\n              <Label\n                content={({ viewBox }) => {\n                  if (viewBox && 'cx' in viewBox && 'cy' in viewBox) {\n                    return (\n                      <text\n                        x={viewBox.cx}\n                        y={viewBox.cy}\n                        textAnchor=\"middle\"\n                        dominantBaseline=\"middle\"\n                      >\n                        <tspan\n                          x={viewBox.cx}\n                          y={viewBox.cy}\n                          className=\"fill-foreground text-3xl font-bold\"\n                        >\n                          {total.toLocaleString()}\n                        </tspan>\n                        <tspan\n                          x={viewBox.cx}\n                          y={(viewBox.cy || 0) + 24}\n                          className=\"fill-muted-foreground\"\n                        >\n                          {label}\n                        </tspan>\n                      </text>\n                    );\n                  }\n                }}\n              />\n            </Pie>\n          </PieChart>\n        </ChartContainer>\n      </CardContent>\n    </Card>\n  );\n}\n\nexport interface IndexProgressChartPlaceholderProps {\n  title: string,\n  label: ReactNode,\n  description?: string\n}\n\nexport function 
IndexProgressChartPlaceholder ({ title, label, description }: IndexProgressChartPlaceholderProps) {\n  return (\n    <Card className=\"flex flex-col\">\n      <CardHeader className=\"items-center pb-0\">\n        <CardTitle>{title}</CardTitle>\n        {description && <CardDescription>{description}</CardDescription>}\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0\">\n        <ChartContainer\n          config={chartConfig}\n          className=\"mx-auto aspect-square max-h-[250px]\"\n        >\n          <PieChart>\n            <Pie\n              animationDuration={0}\n              data={[{ count: 1, state: '', fill: color_placeholder }]}\n              dataKey=\"count\"\n              nameKey=\"state\"\n              innerRadius={60}\n              strokeWidth={5}\n            >\n              <Label\n                content={({ viewBox }) => {\n                  if (viewBox && 'cx' in viewBox && 'cy' in viewBox) {\n                    return (\n                      <text\n                        x={viewBox.cx}\n                        y={viewBox.cy}\n                        textAnchor=\"middle\"\n                        dominantBaseline=\"middle\"\n                      >\n                        <tspan\n                          x={viewBox.cx}\n                          y={viewBox.cy}\n                          className=\"fill-muted-foreground text-3xl font-bold\"\n                        >\n                          --\n                        </tspan>\n                        <tspan\n                          x={viewBox.cx}\n                          y={(viewBox.cy || 0) + 24}\n                          className=\"fill-muted-foreground\"\n                        >\n                          {label}\n                        </tspan>\n                      </text>\n                    );\n                  }\n                }}\n              />\n            </Pie>\n          </PieChart>\n        </ChartContainer>\n      </CardContent>\n    
</Card>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/charts/TotalCard.stories.tsx",
    "content": "import { TotalCard } from '@/components/charts/TotalCard';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport { Aperture } from 'lucide-react';\n\n\nconst meta = {\n  title: 'Components/Charts/TotalCard',\n  component: TotalCard,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ minWidth: 250 }}>\n        <Story />\n      </div>\n    ),\n  ],\n  argTypes: {\n    total: {\n      type: 'number',\n      control: 'select',\n      options: [42, null, undefined],\n    },\n  },\n  args: {},\n} satisfies Meta<typeof TotalCard>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Default: Story = {\n  args: {\n    title: 'Title',\n    isLoading: false,\n    icon: <Aperture />,\n    total: 42,\n    children: 'Hahaha',\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/charts/TotalCard.tsx",
    "content": "import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { cn } from '@/lib/utils';\nimport type { ReactNode } from 'react';\n\nconst nf = new Intl.NumberFormat('en-US', {});\n\nexport interface TotalCardProps {\n  isLoading: boolean;\n  title: string,\n  icon: ReactNode,\n  total?: number | null | undefined,\n  children?: ReactNode\n}\n\nexport function TotalCard ({ isLoading = false, title, icon, total, children }: TotalCardProps) {\n  return (\n    <Card>\n      <CardHeader className=\"flex flex-row items-center justify-between space-y-0 pb-2\">\n        <CardTitle className=\"text-sm font-medium\">{title}</CardTitle>\n        {icon}\n      </CardHeader>\n      <CardContent>\n        <div className={cn('text-2xl font-bold', total == null && 'h-8 pt-2')}>\n          {isLoading ? <Skeleton className=\"w-12 h-4\" /> : nf.format(total || 0)}\n        </div>\n        <p className=\"text-xs text-muted-foreground mt-4\">\n          {children}\n        </p>\n      </CardContent>\n    </Card>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/charts/TrendsChart.tsx",
    "content": "'use client';\n\nimport type { TrendResponse } from '@/api/stats';\nimport { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';\nimport { ChartContainer, ChartLegend, ChartLegendContent, ChartTooltip, ChartTooltipContent } from '@/components/ui/chart';\nimport { cn } from '@/lib/utils';\nimport { addDays, format, isAfter } from 'date-fns';\nimport { ReactNode, useMemo } from 'react';\nimport { Bar, BarChart, CartesianGrid, XAxis } from 'recharts';\n\nexport function TrendsChart<T extends { date: Date }> ({\n  className,\n  title,\n  description,\n  data,\n  dimensions,\n  config,\n}: {\n  className?: string,\n  title: ReactNode,\n  description: ReactNode,\n  data: TrendResponse<T>,\n  dimensions: Exclude<string & keyof T, 'date'>[],\n  config: { [P in Exclude<string & keyof T, 'date'>]: { label: string, color: string } }\n}) {\n  const chartData = useMemo(() => {\n    const dates: any[] = [];\n\n    for (let i = data.start_date; !isAfter(i, data.end_date); i = addDays(i, 1)) {\n      dates.push(i);\n    }\n\n    const map = new Map(data.values.map(item => [String(item.date), item]));\n\n    return dates.map(date => ({\n      ...map.get(String(date)),\n      date: format(date, 'yyyy-MM-dd'),\n    }));\n  }, [data]);\n\n  return (\n    <Card className={cn('size-full', className)}>\n      <CardHeader>\n        <CardTitle>{title}</CardTitle>\n        <CardDescription>\n          {description}\n        </CardDescription>\n      </CardHeader>\n      <CardContent style={{ height: 320 }}>\n        <ChartContainer className=\"size-full\" config={{ ...config }}>\n          <BarChart\n            accessibilityLayer\n            data={chartData}\n            margin={{\n              left: 12,\n              right: 12,\n            }}\n          >\n            <CartesianGrid vertical={false} />\n            <XAxis\n              dataKey=\"date\"\n              type=\"category\"\n              tickLine={false}\n              
axisLine={false}\n              tickMargin={8}\n            />\n            <ChartTooltip\n              cursor={false}\n\n              content={<ChartTooltipContent indicator=\"dot\" nameKey=\"date\" />}\n            />\n            <ChartLegend content={<ChartLegendContent />} />\n            {dimensions.map((dimension) => (\n              <Bar\n                key={dimension}\n                dataKey={dimension}\n                fill={config[dimension].color}\n                stackId=\"value\"\n              />\n            ))}\n          </BarChart>\n        </ChartContainer>\n      </CardContent>\n      {/*<CardFooter>*/}\n      {/*  <div className=\"flex w-full items-start gap-2 text-sm\">*/}\n      {/*    <div className=\"grid gap-2\">*/}\n      {/*      <div className=\"flex items-center gap-2 font-medium leading-none\">*/}\n      {/*        Trending up by 5.2% this month <TrendingUp className=\"h-4 w-4\" />*/}\n      {/*      </div>*/}\n      {/*      <div className=\"flex items-center gap-2 leading-none text-muted-foreground\">*/}\n      {/*        January - June 2024*/}\n      {/*      </div>*/}\n      {/*    </div>*/}\n      {/*  </div>*/}\n      {/*</CardFooter>*/}\n    </Card>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/ask.tsx",
    "content": "import { MessageInput } from '@/components/chat/message-input';\nimport { type UseAskReturns } from '@/components/chat/use-ask';\nimport { SecuritySettingContext, withReCaptcha } from '@/components/security-setting-provider';\nimport { useContext, useRef } from 'react';\n\nexport function Ask ({ className, loading, disabled, ask, engine, setEngine }: { className?: string } & UseAskReturns) {\n  const ref = useRef<HTMLTextAreaElement>(null);\n  const security = useContext(SecuritySettingContext);\n\n  return (\n    <form\n      className={className}\n      onSubmit={e => {\n        const message = ref.current?.value ?? '';\n        e.preventDefault();\n        withReCaptcha({\n          action: 'ask',\n          siteKey: security?.google_recaptcha_site_key || '',\n          mode: security?.google_recaptcha,\n        }, ({ token, action, siteKey }) => {\n          if (message.trim()) {\n            ask(message, {\n              headers: {\n                'X-Recaptcha-Token': token,\n                'X-Recaptcha-Action': action,\n              },\n            });\n            setEngine(undefined);\n          }\n        });\n      }}\n    >\n      <MessageInput className=\"w-full\" disabled={disabled || loading} inputRef={ref} engine={engine} onEngineChange={setEngine} />\n    </form>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-controller.test.ts",
    "content": "import { ChatMessageRole } from '@/api/chats';\nimport type { ChatController } from '@/components/chat/chat-controller';\nimport type { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport type { ChatInitialData } from '@/components/chat/chat-stream-state';\nimport { jest } from '@jest/globals';\nimport { z } from 'zod';\n\njest.unstable_mockModule('../../api/chats', () => ({\n  chatMessageSchema: z.any(),\n  chatSchema: z.any(),\n  chat: (...args: any) => currentChat(...args),\n}));\n\nlet currentChat: any;\n\nafterAll(() => {\n  // using pnpm patch https://github.com/jestjs/jest/pull/15080/files#diff-c0d5b59e96fdc7ffc98405e8afb46d525505bc7b1c24916b5c8482de5a186c00\n  jest.unstable_unmockModule('../../api/chats');\n});\n\nconst exampleData = {\n  chat: {\n    id: 'mock',\n    created_at: new Date,\n    updated_at: new Date,\n    browser_id: null,\n    deleted_at: null,\n    origin: \"None\",\n    engine_options: {\n      llm: {\n        condense_question_prompt: '',\n        text_qa_prompt: '',\n        refine_prompt: '',\n        intent_graph_knowledge: '',\n        normal_graph_knowledge: '',\n      },\n      knowledge_graph: {\n        depth: 0,\n        enabled: false,\n        include_meta: false,\n        with_degree: false,\n        using_intent_search: false,\n      },\n    },\n    user_id: '0',\n    engine_id: 1,\n    title: 'Demo',\n  },\n  assistant_message: {\n    chat_id: 'mock',\n    content: '',\n    created_at: new Date(),\n    error: null,\n    finished_at: new Date(),\n    id: 2,\n    ordinal: 1,\n    role: ChatMessageRole.assistant,\n    sources: [],\n    trace_url: '',\n    updated_at: new Date(),\n    user_id: '0',\n    post_verification_result_url: null,\n  },\n  user_message: {\n    chat_id: 'mock',\n    content: 'ping',\n    created_at: new Date(),\n    error: null,\n    finished_at: new Date(),\n    id: 1,\n    ordinal: 0,\n    role: ChatMessageRole.user,\n    sources: [],\n    trace_url: '',\n  
  updated_at: new Date(),\n    user_id: '0',\n    post_verification_result_url: null,\n  },\n} satisfies ChatInitialData;\n\ndescribe('stream protocol', () => {\n  const onPost = jest.fn();\n  const onPostInitialized = jest.fn();\n  const onMessageLoaded = jest.fn();\n  const onPostError = jest.fn();\n  const onPostFinished = jest.fn();\n\n  const postRejection = jest.fn();\n\n  const addListeners = (controller: ChatController) => {\n    controller.on('post', onPost)\n      .on('post-initialized', onPostInitialized)\n      .on('message-loaded', onMessageLoaded)\n      .on('post-error', onPostError)\n      .on('post-finished', onPostFinished);\n  };\n\n  const newChatController = async () => {\n    // for using `jest.unstable_mockModule` mocked module\n    const { ChatController } = await import('./chat-controller');\n\n    const controller = new ChatController();\n    addListeners(controller);\n\n    return controller;\n  };\n\n  test('terminate before server responses', async () => {\n    const error = new Error('terminate before server response');\n    currentChat = () => {\n      throw error;\n    };\n\n    const controller = await newChatController();\n    await controller.post({ content: 'hi' }).catch(postRejection);\n\n    expect(postRejection).toHaveBeenCalledTimes(0);\n\n    expect(onPost).toHaveBeenCalledTimes(1);\n    expect(onPost).toHaveBeenCalledWith({ content: 'hi' });\n\n    expect(onPostInitialized).toHaveBeenCalledTimes(0);\n\n    expect(onPostError).toHaveBeenCalledTimes(1);\n    expect(onPostError).toHaveBeenCalledWith(error);\n  });\n\n  test('terminated by stream protocol', async () => {\n    currentChat = async function* () {\n      yield {\n        type: 'data',\n        value: [exampleData],\n      };\n      yield { type: 'text', value: 'pong' };\n      yield { type: 'error', value: 'terminated' };\n    };\n\n    const controller = await newChatController();\n\n    await controller.post({ content: 'hi' }).catch(postRejection);\n    
expect(postRejection).toHaveBeenCalledTimes(0);\n\n    expect(onPost).toHaveBeenCalledTimes(1);\n    expect(onPost).toHaveBeenCalledWith({ content: 'hi' });\n\n    expect(onPostInitialized).toHaveBeenCalledTimes(1);\n    expect(onPostError).toHaveBeenCalledTimes(0);\n\n    expect(onMessageLoaded).toHaveBeenCalledTimes(2);\n\n    const assistantMessage: ChatMessageController = controller.messages.find(msg => msg.role === 'assistant')!;\n    expect(assistantMessage.content).toBe('pong');\n    expect(assistantMessage.message.error).toBe('terminated');\n  });\n\n  test('normal', async () => {\n    currentChat = async function* () {\n      yield {\n        type: 'data',\n        value: [exampleData],\n      };\n      yield { type: 'text', value: 'pong' };\n    };\n\n    const controller = await newChatController();\n\n    await controller.post({ content: 'ping' }).catch(postRejection);\n\n    expect(postRejection).toHaveBeenCalledTimes(0);\n\n    expect(onPostInitialized).toHaveBeenCalledTimes(1);\n    expect(onMessageLoaded).toHaveBeenCalledTimes(2);\n    expect(onPostFinished).toHaveBeenCalledTimes(1);\n\n    const assistantMessage: ChatMessageController = controller.messages.find(msg => msg.role === 'assistant')!;\n    expect(assistantMessage.content).toBe('pong');\n  });\n});\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-controller.ts",
    "content": "import { chat, type Chat, type ChatMessage, type PostChatParams } from '@/api/chats';\nimport { BaseChatMessageController, ChatMessageController, LegacyChatMessageController, type OngoingState, StackVMChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState, type BaseAnnotation, chatDataPartSchema, fixChatInitialData, type StackVMState } from '@/components/chat/chat-stream-state';\nimport type { GtagFn } from '@/components/gtag-provider';\nimport { getErrorMessage } from '@/lib/errors';\nimport { trigger } from '@/lib/react';\nimport { type JSONValue, type StreamPart } from 'ai';\nimport EventEmitter from 'eventemitter3';\n\nexport interface ChatControllerEventsMap<State = AppChatStreamState, Annotation extends BaseAnnotation<State> = BaseAnnotation<State>> {\n  'created': [Chat];\n  'updated': [Chat];\n  'message-loaded': [messageController: BaseChatMessageController<State, Annotation>];\n\n  /**\n   * Emit instantly when {@link ChatController#post} is called\n   */\n  'post': [params: Omit<PostChatParams, 'chat_id'>];\n\n  /**\n   * Emit when server returned chat and chat_message info\n   */\n  'post-initialized': [];\n  'post-finished': [];\n  'post-error': [error: unknown];\n\n  /**\n   * Experimental\n   */\n  'ui:input-mount': [HTMLTextAreaElement | HTMLInputElement];\n  'ui:input-unmount': [HTMLTextAreaElement | HTMLInputElement];\n}\n\nexport class ChatController<State extends AppChatStreamState = AppChatStreamState, Annotation extends BaseAnnotation<State> = BaseAnnotation<State>> extends EventEmitter<ChatControllerEventsMap<State, Annotation>> {\n  public chat: Chat | undefined;\n\n  private _messages: Map<number, ChatMessageController | StackVMChatMessageController> = new Map();\n\n  private _postParams: Omit<PostChatParams, 'chat_id'> | undefined = undefined;\n  private _postError: unknown = undefined;\n  private _postInitialized: boolean = false;\n\n  private _inputElement: 
HTMLTextAreaElement | HTMLInputElement | null = null;\n\n  get postState () {\n    return {\n      params: this._postParams,\n      error: this._postError,\n      initialized: this._postInitialized,\n    };\n  }\n\n  constructor (\n    chat: Chat | undefined = undefined,\n    messages: ChatMessage[] | undefined = [],\n    initialPost: Omit<PostChatParams, 'chat_id'> | undefined = undefined,\n    inputElement: HTMLInputElement | HTMLTextAreaElement | null = null,\n    private readonly _gtagFn: GtagFn = () => {},\n  ) {\n    super();\n    if (chat) {\n      this.chat = chat;\n    }\n    for (const message of messages) {\n      this.upsertMessage(message);\n    }\n    if (initialPost) {\n      this.post(initialPost);\n    }\n    this._inputElement = inputElement;\n    if (inputElement) {\n      this.emit('ui:input-mount', inputElement);\n    }\n  }\n\n  get inputElement () {\n    return this._inputElement;\n  }\n\n  set inputElement (value: HTMLInputElement | HTMLTextAreaElement | null) {\n    if (this._inputElement) {\n      if (value) {\n        if (value !== this._inputElement) {\n          const old = this._inputElement;\n          this._inputElement = null;\n          this.emit('ui:input-unmount', old);\n\n          this._inputElement = value;\n          this.emit('ui:input-mount', value);\n        }\n      } else {\n        const old = this._inputElement;\n        this._inputElement = null;\n        this.emit('ui:input-unmount', old);\n      }\n    } else {\n      if (value) {\n        this._inputElement = value;\n        this.emit('ui:input-mount', value);\n      }\n    }\n  }\n\n  private get _enabledInputElement () {\n    if (!this._inputElement) {\n      console.warn('Input element is not exists.');\n      return;\n    }\n    if (this._inputElement.disabled) {\n      console.warn('Input element is disabled currently.');\n      return;\n    }\n\n    return this._inputElement;\n  }\n\n  get inputEnabled () {\n    if (!this._inputElement) {\n      return 
false;\n    }\n\n    return !this._inputElement.disabled;\n  }\n\n  get input (): string {\n    return this._inputElement?.value ?? '';\n  }\n\n  set input (value: string) {\n    const inputElement = this._enabledInputElement;\n    if (inputElement) {\n      trigger(inputElement as HTMLTextAreaElement, HTMLTextAreaElement, value);\n    }\n  }\n\n  focusInput () {\n    const inputElement = this._enabledInputElement;\n    if (inputElement) {\n      inputElement.focus();\n    }\n  }\n\n  get messages (): (ChatMessageController | StackVMChatMessageController)[] {\n    return Array.from(this._messages.values()).sort((a, b) => a.message.ordinal - b.message.ordinal);\n  }\n\n  async post (params: Omit<PostChatParams, 'chat_id'>) {\n    if (this._postParams) {\n      throw new Error('previous not finished.');\n    }\n\n    if (!params.content.trim()) {\n      throw new Error('Empty message');\n    }\n\n    this._gtagFn('event', 'tidbai.events.message-start', {\n      'tidbai_appending_message': !!this.chat?.id,\n    });\n\n    // Initialize post states\n    this._postParams = params;\n    this._postError = undefined;\n    this._postInitialized = false;\n    this.emit('post', params);\n\n    let ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined = undefined;\n\n    try {\n      const streamParts = chat({\n        chat_id: this.chat?.id,\n        ...params,\n      });\n\n      // Process stream parts and dispatch to ongoingMessageController\n      for await (let part of streamParts) {\n        ongoingMessageController = this._processPart(ongoingMessageController, part);\n      }\n\n      // Cleanup post states\n      if (ongoingMessageController) {\n        this.upsertMessage(ongoingMessageController.finish());\n      } else {\n        console.warn('Empty ongoing message');\n      }\n\n      this._postParams = undefined;\n      this._postInitialized = false;\n      this.emit('post-finished');\n\n      this._gtagFn('event', 
'tidbai.events.message-finish', {});\n    } catch (error) {\n      ongoingMessageController?.applyError(getErrorMessage(error));\n      this._postError = error;\n      this.emit('post-error', error);\n\n      this._gtagFn('event', 'tidbai.events.message-error', {});\n    }\n  }\n\n  // TODO: wait server implementation\n  async regenerate (messageId: number) {\n    throw new Error('not supported.');\n  }\n\n  updateChat (chat: Chat): void {\n    const newCreated = !this.chat;\n    this.chat = { ...this.chat, ...chat };\n    this.emit(newCreated ? 'created' : 'updated', this.chat);\n\n    if (process.env.NEXT_PUBLIC_IS_WIDGET) {\n      if (newCreated) {\n        window.dispatchEvent(new CustomEvent('tidbainewchat', {\n          detail: { id: chat.id },\n        }));\n      }\n    }\n  }\n\n  upsertMessage (message: ChatMessage): void {\n    let controller = this._messages.get(message.id);\n\n    if (controller) {\n      controller.update(message);\n    } else {\n      this.createMessage(message);\n    }\n  }\n\n  _processPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, any, any>['parse']>) {\n    switch (part.type) {\n      case 'data':\n        // Data part contains chat and chat_message info from server. 
will be sent twice (beginning and finished).\n        // We will update frontend cached and computed info like message content which is computed from stream text deltas.\n        ongoingMessageController = this._processDataPart(ongoingMessageController, part);\n        break;\n      case 'message_annotations':\n        // Message annotations part containing current generating state.\n        this._processMessageAnnotationPart(ongoingMessageController, part);\n        break;\n      case 'text':\n        this._processTextPart(ongoingMessageController, part);\n        break;\n      case 'error':\n        this._processErrorPart(ongoingMessageController, part);\n        break;\n      case 'tool_call':\n        this._processToolCallPart(ongoingMessageController, part);\n        break;\n      case 'tool_result':\n        this._processToolResultPart(ongoingMessageController, part);\n        break;\n      case 'finish_step':\n\n      case 'finish_message':\n        // TODO\n        break;\n      default:\n        console.warn('unsupported stream part', part);\n    }\n    return ongoingMessageController;\n  }\n\n  private _processDataPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, 'data', JSONValue[]>['parse']>): ChatMessageController | StackVMChatMessageController {\n    const { chat, user_message, assistant_message } = chatDataPartSchema.parse(fixChatInitialData(part.value[0]));\n    this.updateChat(chat);\n    this.upsertMessage(user_message);\n    if (!ongoingMessageController) {\n      ongoingMessageController = this.createMessage(assistant_message, true);\n      this._postInitialized = true;\n      this.emit('post-initialized');\n    } else {\n      ongoingMessageController.update(assistant_message);\n    }\n\n    return ongoingMessageController;\n  }\n\n  private _processMessageAnnotationPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, 
part: ReturnType<StreamPart<any, 'message_annotations', JSONValue[]>['parse']>) {\n    assertNonNull(ongoingMessageController, 'Cannot handle chat stream part: no ongoingMessageController', part);\n    const annotation = ongoingMessageController.parseAnnotation(part.value[0]);\n    ongoingMessageController.applyStreamAnnotation(annotation as never);\n  }\n\n  private _processTextPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, 'text', string>['parse']>) {\n    if (part.value) { // ignore leading empty chunks.\n      assertNonNull(ongoingMessageController, 'Cannot handle chat stream part: no ongoingMessageController', part);\n      ongoingMessageController.applyDelta(part.value);\n    }\n  }\n\n  private _processErrorPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, 'error', string>['parse']>) {\n    assertNonNull(ongoingMessageController, 'Cannot handle chat stream part: no ongoingMessageController', part);\n    ongoingMessageController.applyError(part.value);\n  }\n\n  private _processToolCallPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, 'tool_call', { toolCallId: string, toolName: string, args: any }>['parse']>) {\n    assertNonNull(ongoingMessageController, 'Cannot handle chat stream part: no ongoingMessageController', part);\n    ongoingMessageController.applyToolCall(part.value);\n  }\n\n  private _processToolResultPart (ongoingMessageController: ChatMessageController | StackVMChatMessageController | undefined, part: ReturnType<StreamPart<any, 'tool_call', { toolCallId: string, result: any }>['parse']>) {\n    assertNonNull(ongoingMessageController, 'Cannot handle chat stream part: no ongoingMessageController', part);\n    ongoingMessageController.applyToolResult(part.value);\n  }\n\n  private createMessage (message: 
ChatMessage, initialOngoingState?: true) {\n    if (!this.chat?.engine_options) {\n      throw new Error('Unable to decide which chat engine used.');\n    }\n\n    if (this.chat.engine_options.external_engine_config?.stream_chat_api_url) {\n      return this.createStackVMMessage(message, initialOngoingState);\n    } else {\n      return this.createLegacyMessage(message, initialOngoingState);\n    }\n  }\n\n  private createLegacyMessage (message: ChatMessage, initialOngoingState?: true | OngoingState) {\n    const controller = new LegacyChatMessageController(message, initialOngoingState);\n    this._messages.set(message.id, controller);\n    this.emit('message-loaded', controller as any);\n    return controller;\n  }\n\n  private createStackVMMessage (message: ChatMessage, initialOngoingState?: true | OngoingState<StackVMState>) {\n    const controller = new StackVMChatMessageController(message, initialOngoingState);\n    this._messages.set(message.id, controller);\n    this.emit('message-loaded', controller as any);\n    return controller;\n  }\n}\n\nfunction assertNonNull<T> (value: T, message: string, ...args: any): asserts value is NonNullable<T> {\n  if (value == null) {\n    console.warn(message, args);\n    throw new Error('bad stream');\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-hooks.tsx",
    "content": "import { type Chat, type ChatMessage, ChatMessageRole } from '@/api/chats';\nimport { isBootstrapStatusPassed } from '@/api/system';\nimport { ChatController } from '@/components/chat/chat-controller';\nimport { BaseChatMessageController, ChatMessageController, type ChatMessageControllerAnnotationState, type OngoingState, type OngoingStateHistoryItem } from '@/components/chat/chat-message-controller';\nimport type { AppChatStreamState, StackVMState } from '@/components/chat/chat-stream-state';\nimport { useGtagFn } from '@/components/gtag-provider';\nimport { useBootstrapStatus } from '@/components/system/BootstrapStatusProvider';\nimport { useLatestRef } from '@/components/use-latest-ref';\nimport { createContext, type ReactNode, useContext, useEffect, useState } from 'react';\n\nexport interface ChatsProviderValues {\n  chats: Map<string, ChatController>;\n  disabled: boolean;\n\n  newChat (...args: ConstructorParameters<typeof ChatController>): ChatController;\n\n  destroyChat (id: string): void;\n}\n\nconst ChatsContext = createContext<ChatsProviderValues>({\n  chats: new Map(),\n  disabled: true,\n  newChat (): ChatController {\n    throw new Error('not in a chat context');\n  },\n  destroyChat () {\n    throw new Error('not in a chat context');\n  },\n});\n\nconst ChatControllerContext = createContext<ChatController | null>(null);\n\nexport function ChatsProvider ({ onChatCreated, children }: { children: ReactNode, /** @deprecated */onChatCreated?: (id: string, chat: Chat, controller: ChatController) => void }) {\n  const bootstrapStatusRef = useLatestRef(useBootstrapStatus());\n  const [chats, setChats] = useState(() => new Map<string, ChatController>);\n\n  const newChat: ChatsProviderValues['newChat'] = (...args) => {\n    if (!isBootstrapStatusPassed(bootstrapStatusRef.current)) {\n      throw new Error('System check not passed.');\n    }\n\n    const controller = new ChatController(...args);\n    controller.once('created', (chat) => {\n   
   setChats(chats => new Map(chats).set(chat.id, controller));\n      onChatCreated?.(chat.id, chat, controller);\n    });\n\n    return controller;\n  };\n\n  const destroyChat: ChatsProviderValues['destroyChat'] = (id: string) => {\n    setChats(chats => {\n      chats = new Map(chats);\n      chats.delete(id);\n      return chats;\n    });\n  };\n\n  return (\n    <ChatsContext.Provider value={{\n      chats,\n      disabled: !isBootstrapStatusPassed(bootstrapStatusRef.current),\n      newChat,\n      destroyChat,\n    }}>\n      {children}\n    </ChatsContext.Provider>\n  );\n}\n\nexport function ChatControllerProvider ({ controller, children }: { controller: ChatController | null, children: ReactNode }) {\n\n  return (\n    <ChatControllerContext.Provider value={controller}>\n      {children}\n    </ChatControllerContext.Provider>\n  );\n}\n\nexport function useChats () {\n  return useContext(ChatsContext);\n}\n\nexport interface ChatMessageGroup {\n  user: ChatMessageController;\n  assistant: ChatMessageController | undefined;\n  hasFirstAssistantMessage: boolean;\n  hasLastAssistantMessage: boolean;\n  hasLastUserMessage: boolean;\n}\n\nexport function useChatController (\n  id: string | undefined,\n  initialChat: Chat | undefined,\n  initialMessages: ChatMessage[] | undefined,\n  inputElement: HTMLInputElement | HTMLTextAreaElement | null = null,\n) {\n  const gtagFn = useGtagFn();\n  const { chats } = useChats();\n\n  // Create essential chat controller\n  const [controller] = useState(() => {\n    if (id) {\n      let controller = chats.get(id);\n      if (!controller) {\n        controller = new ChatController(initialChat, initialMessages, undefined, inputElement, gtagFn);\n        chats.set(id, controller);\n      }\n      return controller;\n    } else {\n      return new ChatController(undefined, undefined, undefined, inputElement, gtagFn);\n    }\n  });\n\n  useEffect(() => {\n    controller.inputElement = inputElement;\n  }, [controller, 
inputElement]);\n\n  return controller;\n}\n\nexport function useChatInfo (controller: ChatController) {\n  const [chat, setChat] = useState(controller.chat);\n\n  useEffect(() => {\n    if (controller) {\n      setChat(controller.chat);\n      const handleChatUpdated = (chat: Chat) => setChat(chat);\n      controller\n        .on('updated', handleChatUpdated)\n        .on('created', handleChatUpdated);\n      return () => {\n        controller\n          .off('updated', handleChatUpdated)\n          .off('created', handleChatUpdated);\n      };\n    }\n  }, [controller]);\n\n  return chat;\n}\n\nexport function useChatPostState (controller: ChatController | undefined) {\n  const [state, setState] = useState(controller?.postState ?? { initialized: false, params: undefined, error: undefined });\n\n  useEffect(() => {\n    if (controller) {\n      setState(controller.postState);\n\n      const handleStateChange = () => {\n        setState(controller.postState);\n      };\n\n      controller\n        .on('post', handleStateChange)\n        .on('post-initialized', handleStateChange)\n        .on('post-finished', handleStateChange)\n        .on('post-error', handleStateChange);\n\n      return () => {\n        controller\n          .off('post', handleStateChange)\n          .off('post-initialized', handleStateChange)\n          .off('post-finished', handleStateChange)\n          .off('post-error', handleStateChange);\n      };\n    }\n  }, [controller]);\n\n  return state;\n}\n\nexport function useChatMessageControllers (controller: ChatController) {\n  const [messageControllers, setMessageControllers] = useState(() => controller?.messages);\n\n  useEffect(() => {\n    if (controller) {\n      setMessageControllers(controller.messages);\n\n      const handleMessageLoaded = () => {\n        setMessageControllers(controller.messages);\n      };\n\n      controller.on('message-loaded', handleMessageLoaded);\n\n      return () => {\n        controller.off('message-loaded', 
handleMessageLoaded);\n      };\n    }\n  }, [controller]);\n\n  return messageControllers;\n}\n\nexport function useChatMessageGroups (controllers: ChatMessageController[]) {\n  const [chatMessageGroups, setChatMessageGroups] = useState<ChatMessageGroup[]>(() => collectMessageGroups(controllers));\n\n  useEffect(() => {\n    setChatMessageGroups(collectMessageGroups(controllers));\n  }, [controllers]);\n\n  return chatMessageGroups;\n}\n\nfunction collectMessageGroups (messageControllers: ChatMessageController[]) {\n  const groups: ChatMessageGroup[] = [];\n\n  let user: ChatMessageController | undefined;\n\n  for (let messageController of messageControllers) {\n    switch (messageController.role) {\n      case ChatMessageRole.user:\n        user = messageController;\n        break;\n      case ChatMessageRole.assistant:\n        if (user) {\n          groups.push({\n            user,\n            assistant: messageController,\n            hasFirstAssistantMessage: false,\n            hasLastAssistantMessage: false,\n            hasLastUserMessage: false,\n          });\n        } else {\n          console.warn('No matched user message, drop assistant message', messageController.message.id);\n        }\n        break;\n    }\n  }\n\n  let group = groups.findLast(group => !!group.assistant);\n  if (group) {\n    group.hasLastAssistantMessage = true;\n  }\n  group = groups.find(group => !!group.assistant);\n  if (group) {\n    group.hasFirstAssistantMessage = true;\n  }\n  group = groups.findLast(group => !!group.user);\n  if (group) {\n    group.hasLastUserMessage = true;\n  }\n  return groups;\n}\n\nexport function useCurrentChatController () {\n  const controller = useContext(ChatControllerContext);\n\n  if (!controller) {\n    throw new Error('Not in a chat controller provider');\n  }\n\n  return controller;\n}\n\nexport function useChatMessageField<K extends keyof ChatMessage> (controller: ChatMessageController, key: K): ChatMessage[K];\nexport function 
useChatMessageField<K extends keyof ChatMessage> (controller: ChatMessageController | undefined, key: K): ChatMessage[K] | undefined;\nexport function useChatMessageField (controller: ChatMessageController | undefined, key: keyof ChatMessage): any {\n  const [value, setValue] = useState(controller?.message[key]);\n\n  useEffect(() => {\n    if (controller) {\n      setValue(controller.message[key]);\n\n      const handleUpdate = (message: ChatMessage) => {\n        setValue(message[key]);\n      };\n\n      controller\n        .on('update', handleUpdate)\n        .on('stream-update', handleUpdate)\n        .on('stream-error', handleUpdate)\n        .on('stream-finished', handleUpdate);\n      return () => {\n        controller\n          .off('update', handleUpdate)\n          .off('stream-update', handleUpdate)\n          .off('stream-error', handleUpdate)\n          .off('stream-finished', handleUpdate);\n      };\n    } else {\n      setValue(undefined);\n    }\n  }, [controller, key]);\n\n  return value;\n}\n\nexport function useChatMessageStreamState<C extends ChatMessageController> (controller: C | undefined): OngoingState<ChatMessageControllerAnnotationState<C>> | undefined {\n  const [state, setState] = useState(controller?.ongoing);\n\n  useEffect(() => {\n    if (controller) {\n      setState(controller.ongoing);\n\n      const handleUpdate = (_: any, state?: OngoingState<AppChatStreamState | StackVMState>) => setState(state);\n\n      controller\n        .on('stream-update', handleUpdate)\n        .on('stream-error', handleUpdate)\n        .on('stream-finished', handleUpdate);\n\n      return () => {\n        controller\n          .off('stream-update', handleUpdate)\n          .off('stream-error', handleUpdate)\n          .off('stream-finished', handleUpdate);\n      };\n    } else {\n      setState(undefined);\n    }\n  }, [controller]);\n\n  return state as OngoingState<ChatMessageControllerAnnotationState<C>>;\n}\n\nexport function 
useChatMessageStreamHistoryStates<C extends BaseChatMessageController<any, any>> (controller: C | undefined): C['ongoingHistory'] {\n  const [state, setState] = useState(controller?.ongoingHistory);\n\n  useEffect(() => {\n    if (controller) {\n      setState(controller.ongoingHistory);\n\n      const handleUpdate = (_: any, state?: OngoingStateHistoryItem[]) => {\n        if (state) {\n          setState(state);\n        }\n      };\n\n      controller\n        .on('stream-history-update', handleUpdate)\n        .on('stream-finished', handleUpdate);\n\n      return () => {\n        controller\n          .off('stream-history-update', handleUpdate)\n          .off('stream-finished', handleUpdate);\n      };\n    } else {\n      setState(undefined);\n    }\n  }, [controller]);\n\n  return state;\n}\n\nexport function useChatMessageStreamContainsState (controller: ChatMessageController | undefined, state: AppChatStreamState) {\n  const history = useChatMessageStreamHistoryStates(controller);\n  const current = useChatMessageStreamState(controller);\n\n  // FIXME: what if state not triggered?\n  if (!current || current.finished) {\n    return true;\n  }\n  return history?.some(item => item.state.state === state) || current?.state === state;\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-message-controller.test.ts",
    "content": "import { ChatMessageController, LegacyChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState } from '@/components/chat/chat-stream-state';\nimport { createExampleInitialChatMessage } from '@/components/chat/testutils';\nimport { jest } from '@jest/globals';\n\ndescribe('stream', () => {\n\n  const onUpdate = jest.fn();\n  const onStreamUpdate = jest.fn();\n  const onStreamError = jest.fn();\n  const onStreamFinished = jest.fn();\n\n  test('success', () => {\n\n    const controller = new LegacyChatMessageController(createExampleInitialChatMessage(), true);\n\n    controller.on('update', onUpdate)\n      .on('stream-update', onStreamUpdate)\n      .on('stream-error', onStreamError)\n      .on('stream-finished', onStreamFinished);\n\n    // TRACE event should provide langfuse_url\n    expect(controller.message.trace_url).toBe('');\n    controller.applyStreamAnnotation({\n      state: AppChatStreamState.TRACE,\n      display: 'trace',\n      context: { langfuse_url: 'fake_url' },\n    });\n    expect(controller.message.trace_url).toBe('fake_url');\n\n    // SOURCE_NODES event should provide sources\n    expect(controller.message.sources.length).toBe(0);\n    controller.applyStreamAnnotation({\n      state: AppChatStreamState.SOURCE_NODES,\n      display: 'source_nodes',\n      context: [{ source_uri: 'uri', id: 0, name: 'foo' }],\n    });\n    expect(controller.message.sources).toStrictEqual([{ source_uri: 'uri', id: 0, name: 'foo' }]);\n\n    // test applyDelta\n    expect(controller.message.content).toEqual('');\n    controller.applyDelta('Hello');\n    expect(controller.message.content).toEqual('Hello');\n    controller.applyDelta(' world!');\n    expect(controller.message.content).toEqual('Hello world!');\n    expect(onStreamUpdate).toHaveBeenCalledTimes(4);\n\n    controller.finish();\n    expect(controller.ongoing).toBeUndefined();\n    expect(onStreamFinished).toHaveBeenCalledTimes(1);\n    
expect(onStreamError).toHaveBeenCalledTimes(0);\n  });\n\n  test('error', () => {\n    const controller = new LegacyChatMessageController(createExampleInitialChatMessage(), true);\n\n    controller.on('update', onUpdate)\n      .on('stream-update', onStreamUpdate)\n      .on('stream-error', onStreamError)\n      .on('stream-finished', onStreamFinished);\n\n    controller.applyError('error');\n\n    expect(controller.ongoing!.finished).toBe(true);\n    expect(controller.message.error).toBe('error');\n\n    expect(onStreamFinished).toHaveBeenCalledTimes(0);\n    expect(onStreamError).toHaveBeenCalledTimes(1);\n  });\n});\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-message-controller.ts",
    "content": "import { type ChatMessage, ChatMessageRole } from '@/api/chats';\nimport { AppChatStreamState, type BaseAnnotation, type ChatMessageAnnotation, type StackVMState, type StackVMStateAnnotation } from '@/components/chat/chat-stream-state';\nimport { StackVM } from '@/lib/stackvm';\nimport EventEmitter from 'eventemitter3';\n\nexport interface OngoingState<State = AppChatStreamState> {\n  finished: boolean;\n  state: State;\n  display: string;\n  message?: string;\n}\n\nexport interface OngoingStateHistoryItem<State = AppChatStreamState> {\n  state: OngoingState<State>;\n  time: Date;\n}\n\nexport interface ChatMessageControllerEventsMap<State = AppChatStreamState> {\n  'update': [assistant_message: ChatMessage];\n  'stream-update': [ongoing_message: ChatMessage, ongoing: OngoingState<State>, delta: string];\n  'stream-history-update': [ongoing_message: ChatMessage, history: { state: OngoingState<State>, time: Date }[]];\n  'stream-finished': [ongoing_message: ChatMessage];\n  'stream-error': [ongoing_message: ChatMessage, ongoing: OngoingState<State>];\n\n  'stream-tool-call': [id: string, name: string, args: any];\n  'stream-tool-result': [id: string, result: any];\n}\n\nexport abstract class BaseChatMessageController<\n  State,\n  Annotation extends BaseAnnotation<State>\n> extends EventEmitter<ChatMessageControllerEventsMap<State>> {\n  protected _message: ChatMessage;\n  protected _ongoing: OngoingState<State> | undefined;\n  protected _ongoingHistory: OngoingStateHistoryItem<State>[] | undefined;\n  public readonly role: ChatMessageRole;\n  public readonly id: number;\n\n  constructor (message: ChatMessage, ongoing: OngoingState<State> | true | undefined) {\n    super();\n    this._message = message;\n    this._ongoing = ongoing === true ? this.createInitialOngoingState() : ongoing;\n    this._ongoingHistory = ongoing ? 
[] : undefined;\n    this.role = message.role;\n    this.id = message.id;\n\n    if (this._message.finished_at == null && !ongoing) {\n      this._ongoing = this.createUnknownOngoingState();\n    }\n  }\n\n  // dynamic, usage in react component needs subscription.\n  get content () {\n    return this.message?.content ?? '';\n  }\n\n  update (message: ChatMessage) {\n    this._message = { ...this._message, ...message };\n    this.emit('update', this._message);\n  }\n\n  applyStreamAnnotation (annotation: Annotation) {\n    if (!this._ongoing || this._ongoing.finished) {\n      console.warn('message already finished');\n      return;\n    }\n    const stateChanged = annotation.state !== this._ongoing.state;\n\n    let message = this._message;\n    const ongoing: OngoingState<State> = { ...this._ongoing };\n\n    ongoing.state = annotation.state;\n    ongoing.display = annotation.display || (stateChanged ? '' : ongoing.display);\n    ongoing.message = stateChanged ? undefined : ongoing.message;\n\n    message = this._polishMessage(message, ongoing, annotation);\n\n    const lastOngoing = this._ongoing;\n\n    this._ongoing = ongoing;\n    this._message = message;\n    if (annotation.state === AppChatStreamState.FINISHED) {\n      this._ongoing.finished = true;\n    }\n    this.emit('stream-update', this._message, this._ongoing, '');\n\n    if (stateChanged && this._ongoingHistory != null) {\n      const lastState = this._ongoingHistory[this._ongoingHistory.length - 1];\n      if (lastOngoing && lastOngoing.display && lastOngoing.state !== lastState?.state.state) {\n        // Insert new state\n        this._ongoingHistory = [\n          ...this._ongoingHistory,\n          {\n            state: lastOngoing,\n            time: new Date(),\n          },\n        ];\n        this.emit('stream-history-update', this._message, this._ongoingHistory);\n      }\n    }\n  }\n\n  applyDelta (delta: string) {\n    if (!this._ongoing || this._ongoing.finished) {\n      
console.warn('message already finished');\n      return;\n    }\n    this._message = {\n      ...this._message,\n      content: this._message.content + delta,\n    };\n    this.emit('stream-update', this._message, this._ongoing, delta);\n  }\n\n  applyError (error: string) {\n    if (!this._ongoing || this._ongoing.finished) {\n      console.warn('message already finished');\n      console.error('Error in ChatMessageController (on finished message):', error);\n      return;\n    }\n    this._ongoing = {\n      ...this._ongoing,\n      finished: true,\n    };\n    this._message = {\n      ...this._message,\n      error,\n    };\n    this.emit('stream-error', this._message, this._ongoing);\n  }\n\n  applyToolCall ({ toolCallId, toolName, args }: { toolCallId: string, toolName: string, args: any }) {\n    this.emit('stream-tool-call', toolCallId, toolName, args);\n  }\n\n  applyToolResult ({ toolCallId, result }: { toolCallId: string, result: any }) {\n    this.emit('stream-tool-result', toolCallId, result);\n  }\n\n  finish () {\n    this._ongoing = undefined;\n    this.emit('stream-finished', this._message);\n    return this._message;\n  }\n\n  get message (): ChatMessage {\n    return this._message;\n  }\n\n  get ongoing () {\n    return this._ongoing;\n  }\n\n  get ongoingHistory () {\n    return this._ongoingHistory;\n  }\n\n  abstract parseAnnotation (raw: unknown): Annotation;\n\n  abstract createInitialOngoingState (): OngoingState<State>;\n\n  abstract createUnknownOngoingState (): OngoingState<State>;\n\n  protected abstract _polishMessage (message: ChatMessage, ongoing: OngoingState<State>, annotation: Annotation): ChatMessage\n}\n\nexport type ChatMessageController = LegacyChatMessageController | StackVMChatMessageController;\nexport type ChatMessageControllerAnnotationState<C extends ChatMessageController> = C extends BaseChatMessageController<infer State, any> ? 
State : never;\n\nexport class LegacyChatMessageController extends BaseChatMessageController<AppChatStreamState, ChatMessageAnnotation> {\n  readonly version = 'Legacy';\n\n  parseAnnotation (raw: unknown): ChatMessageAnnotation {\n    return raw as ChatMessageAnnotation;\n  }\n\n  createInitialOngoingState (): OngoingState {\n    return {\n      state: AppChatStreamState.CONNECTING,\n      display: 'Connecting to server...',\n      finished: false,\n    };\n  }\n\n  createUnknownOngoingState (): OngoingState {\n    return {\n      state: AppChatStreamState.UNKNOWN,\n      display: 'Unknown',\n      finished: false,\n    };\n  }\n\n  _polishMessage (message: ChatMessage, ongoing: OngoingState, annotation: ChatMessageAnnotation) {\n    switch (annotation.state) {\n      case AppChatStreamState.TRACE:\n        message = { ...message };\n        message.trace_url = annotation.context.langfuse_url;\n        break;\n      case AppChatStreamState.SOURCE_NODES:\n        message = { ...message };\n        message.sources = annotation.context;\n        break;\n      case AppChatStreamState.REFINE_QUESTION:\n        ongoing.message = annotation.message || ongoing.message;\n        break;\n    }\n\n    return message;\n  }\n}\n\nexport class StackVMChatMessageController extends BaseChatMessageController<StackVMState, StackVMStateAnnotation> {\n  readonly version = 'StackVM';\n\n  applyToolCall (payload: { toolCallId: string; toolName: string; args: any }) {\n    super.applyToolCall(payload);\n    if (this._ongoing) {\n      this._ongoing = {\n        ...this._ongoing,\n        state: {\n          ...this._ongoing.state,\n          toolCalls: [...this._ongoing.state.toolCalls, payload],\n        },\n      };\n      this.emit('stream-update', this._message, this._ongoing, '');\n    }\n  }\n\n  applyToolResult (payload: { toolCallId: string; result: any }) {\n    super.applyToolResult(payload);\n    if (this._ongoing) {\n      const idx = 
this._ongoing.state.toolCalls.findIndex(toolCall => toolCall.toolCallId === payload.toolCallId);\n      if (idx >= 0) {\n        this._ongoing.state.toolCalls[idx] = {\n          ...this._ongoing.state.toolCalls[idx],\n          result: payload.result,\n        };\n        this._ongoing.state = { ...this._ongoing.state };\n        this._ongoing = { ...this._ongoing };\n        this.emit('stream-update', this._message, this._ongoing, '');\n      }\n    }\n  }\n\n  parseAnnotation (raw: unknown): StackVMStateAnnotation {\n    const { state: rawState, task_id, branch, seq_no } = raw as { state: StackVM.State, task_id: string, branch: string, seq_no: number };\n    const state = StackVM.model.parseState(rawState);\n\n    return {\n      state: { task_id, branch, state, toolCalls: [], seq_no },\n      display: '[deprecated]',\n    };\n  }\n\n  createInitialOngoingState (): OngoingState<StackVMState> {\n    return {\n      state: {\n        task_id: '',\n        branch: '',\n        seq_no: -1,\n        state: {\n          variables_refs: {},\n          variables: {},\n          errors: [],\n          current_plan: [],\n          program_counter: -1,\n          goal_completed: false,\n          goal: '',\n          msgs: [],\n          plan: {\n            steps: [],\n            vars: [],\n          },\n        },\n        toolCalls: [],\n      },\n      display: 'Thinking...',\n      finished: false,\n    };\n  }\n\n  createUnknownOngoingState (): OngoingState<StackVMState> {\n    return {\n      state: {\n        task_id: '',\n        branch: '',\n        seq_no: -1,\n        state: {\n          variables_refs: {},\n          variables: {},\n          errors: ['Unknown state'],\n          current_plan: [],\n          program_counter: -1,\n          goal_completed: false,\n          goal: '',\n          msgs: [],\n          plan: {\n            steps: [],\n            vars: [],\n          },\n        },\n        toolCalls: [],\n      },\n      display: 'Unknown',\n     
 finished: false,\n    };\n  }\n\n  _polishMessage (message: ChatMessage): ChatMessage {\n    return message;\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-new-dialog.tsx",
    "content": "import { Ask } from '@/components/chat/ask';\nimport { useAsk } from '@/components/chat/use-ask';\nimport { Button } from '@/components/ui/button';\nimport { Dialog, DialogDescription, DialogHeader, DialogOverlay, DialogPortal, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { cn } from '@/lib/utils';\nimport * as DialogPrimitive from '@radix-ui/react-dialog';\nimport { CommandIcon, PlusIcon } from 'lucide-react';\nimport { useEffect, useState } from 'react';\n\nexport function ChatNewDialog () {\n  const [open, setOpen] = useState(false);\n  const ask = useAsk(() => {\n    setOpen(false);\n  });\n  useEffect(() => {\n    const handle = (e: KeyboardEvent) => {\n      if (e.key === 'k' && (e.metaKey || e.ctrlKey) && !(e.shiftKey)) {\n        setOpen(true);\n        e.preventDefault();\n        e.stopPropagation();\n      }\n    };\n    window.addEventListener('keydown', handle);\n    return () => {\n      window.removeEventListener('keydown', handle);\n    };\n  }, []);\n\n  return (\n    <Dialog open={ask.loading || open} onOpenChange={setOpen}>\n      <DialogHeader className=\"sr-only\">\n        <DialogTitle>Ask Question Dialog</DialogTitle>\n        <DialogDescription></DialogDescription>\n      </DialogHeader>\n      <DialogTrigger asChild>\n        <Button variant=\"outline\" className=\"w-full mb-4 rounded-full cursor-text font-normal text-foreground/70 gap-2\">\n          <PlusIcon className=\"size-4 text-muted-foreground\" />\n          New Thread\n          <span className=\"ml-auto flex-shrink-0 flex gap-1 items-center rounded-full\"><CommandIcon size=\"1em\" /> K</span>\n        </Button>\n      </DialogTrigger>\n      <DialogPortal>\n        <DialogOverlay />\n        <DialogPrimitive.DialogContent\n          className={cn(\n            'fixed left-[50%] top-[50%] z-50 grid w-full max-w-3xl translate-x-[-50%] translate-y-[-50%] gap-4 bg-accent shadow-lg duration-200 data-[state=open]:animate-in 
data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg',\n            'p-4',\n          )}\n        >\n          <Ask {...ask} />\n        </DialogPrimitive.DialogContent>\n      </DialogPortal>\n    </Dialog>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/chat-stream-state.ts",
    "content": "/**\n *  TRACE = 0\n *     SOURCE_NODES = 1\n *     KG_RETRIEVAL = 2\n *     REFINE_QUESTION = 3\n *     SEARCH_RELATED_DOCUMENTS = 4\n *     GENERATE_ANSWER = 5\n *     FINISHED = 9\n */\nimport { Chat, ChatMessage, chatMessageSchema, ChatMessageSource, chatSchema } from '@/api/chats';\nimport { StackVM } from '@/lib/stackvm';\nimport { z, type ZodType } from 'zod';\n\nexport const enum BaseState {\n  CONNECTING = 'CONNECTING', // only client side\n  UNKNOWN = 'UNKNOWN',\n}\n\nexport const enum AppChatStreamState {\n  CONNECTING = 'CONNECTING', // only client side\n  TRACE = 'TRACE',\n  SOURCE_NODES = 'SOURCE_NODES',\n  KG_RETRIEVAL = 'KG_RETRIEVAL',\n  REFINE_QUESTION = 'REFINE_QUESTION',\n  SEARCH_RELATED_DOCUMENTS = 'SEARCH_RELATED_DOCUMENTS',\n  RERANKING = 'RERANKING',\n  GENERATE_ANSWER = 'GENERATE_ANSWER',\n  FINISHED = 'FINISHED',\n  FAILED = 'FAILED',\n  UNKNOWN = 'UNKNOWN',\n}\n\nexport type StackVMState = {\n  task_id: string;\n  branch: string;\n  state: StackVM.model.ParsedState;\n  seq_no: number;\n  toolCalls: StackVMToolCall[];\n};\n\nexport type StackVMToolCall = { toolCallId: string, toolName: string, args: any, result?: any }\n\nexport interface BaseAnnotation<S = AppChatStreamState> {\n  state: S;\n  display?: string;\n}\n\nexport interface TraceAnnotation extends BaseAnnotation<AppChatStreamState.TRACE> {\n  context: { langfuse_url: string };\n}\n\nexport interface SourceNodesAnnotation extends BaseAnnotation<AppChatStreamState.SOURCE_NODES> {\n  context: ChatMessageSource[];\n}\n\nexport interface RefineQuestionAnnotation extends BaseAnnotation<AppChatStreamState.REFINE_QUESTION> {\n  message?: string;\n}\n\nexport type ChatMessageAnnotation =\n  BaseAnnotation<Exclude<AppChatStreamState, AppChatStreamState.TRACE | AppChatStreamState.SOURCE_NODES | AppChatStreamState.REFINE_QUESTION>>\n  | TraceAnnotation\n  | SourceNodesAnnotation\n  | RefineQuestionAnnotation;\n\nexport interface StackVMStateAnnotation extends 
BaseAnnotation<StackVMState> {\n}\n\nexport type ChatInitialData = {\n  chat: Chat;\n  user_message: ChatMessage;\n  assistant_message: ChatMessage;\n}\n\n// FIXME: Server will not return updated_at\nexport function fixChatInitialData (data: any) {\n  if (data.assistant_message) {\n    if (!data.assistant_message.updated_at) {\n      data.assistant_message.updated_at = data.assistant_message.created_at;\n    }\n  }\n  return data as any;\n}\n\nexport const chatDataPartSchema = z.object({\n  chat: chatSchema,\n  user_message: chatMessageSchema,\n  assistant_message: chatMessageSchema,\n}) satisfies ZodType<ChatInitialData, any, any>;\n"
  },
  {
    "path": "frontend/app/src/components/chat/chat-stream.state.test.ts",
    "content": "import { chatDataPartSchema, fixChatInitialData } from '@/components/chat/chat-stream-state';\n\nit('pass', () => {\n  const result = chatDataPartSchema.safeParse(fixChatInitialData({\n    'chat': {\n      'updated_at': '2024-07-08T10:28:39',\n      'id': '019091e3-5cfc-74a3-b5e0-653a73f52af2',\n      'title': 'What is TiDB?',\n      'engine_id': 1,\n      'engine_options': {\"llm\":{\"provider\":\"openai\",\"openai_chat_model\":\"gpt-4o\",\"gemini_chat_model\":\"models/gemini-2.0-flash\",\"reranker_provider\":\"jinaai\",\"reranker_top_k\":10,\"intent_graph_knowledge\":\"Given a list of relationships of a knowledge graph as follows. When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\nKnowledge sub-queries:\\\\n\\\\n{% for sub_query, data in sub_queries.items() %}\\\\n\\\\nSub-query: {{ sub_query }}\\\\n\\\\n  - Entities:\\\\n\\\\n{% for entity in data[\\'entities\\'] %}\\\\n\\\\n    - Name: {{ entity.name }}\\\\n    - Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n  - Relationships:\\\\n\\\\n{% for relationship in data[\\'relationships\\'] %}\\\\n\\\\n    - Description: {{ relationship.rag_description }}\\\\n    - Last Modified At: {{ relationship.last_modified_at }}\\\\n    - Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\\\\n{% endfor %}\\\\n\",\"normal_graph_knowledge\":\"Given a list of relationships of a knowledge graph as follows. 
When there is a conflict in meaning between knowledge relationships, the relationship with the higher `weight` and newer `last_modified_at` value takes precedence.\\\\n\\\\n---------------------\\\\nEntities:\\\\n\\\\n{% for entity in entities %}\\\\n\\\\n- Name: {{ entity.name }}\\\\n- Description: {{ entity.description }}\\\\n\\\\n{% endfor %}\\\\n\\\\n---------------------\\\\n\\\\nKnowledge relationships:\\\\n\\\\n{% for relationship in relationships %}\\\\n\\\\n- Description: {{ relationship.rag_description }}\\\\n- Weight: {{ relationship.weight }}\\\\n- Last Modified At: {{ relationship.last_modified_at }}\\\\n- Meta: {{ relationship.meta | tojson(indent=2) }}\\\\n\\\\n{% endfor %}\\\\n\"},\"knowledge_graph\":{\"enabled\":true,\"depth\":2,\"include_meta\":true,\"with_degree\":false,\"using_intent_search\":true}},\n      'user_id': null,\n      'browser_id': null,\n      'created_at': '2024-07-08T10:28:39',\n      'deleted_at': null,\n      'origin': null,\n    },\n    'user_message': {\n      'id': 60033,\n      'created_at': '2024-07-08T10:28:40',\n      'role': 'user',\n      'trace_url': null,\n      'finished_at': null,\n      'user_id': null,\n      'updated_at': '2024-07-08T10:28:40',\n      'ordinal': 1,\n      'content': 'What is TiDB?',\n      'error': null,\n      'sources': [],\n      'chat_id': '019091e3-5cfc-74a3-b5e0-653a73f52af2',\n      \"post_verification_result_url\": null,\n    },\n    'assistant_message': {\n      'id': 60034,\n      'created_at': '2024-07-08T10:28:40',\n      'role': 'assistant',\n      'trace_url': 'https://us.cloud.langfuse.com/trace/fd18e8c4-94b7-4b6a-a6e7-3877f07a3d2d',\n      'finished_at': '2024-07-08T10:30:12.129128Z',\n      'user_id': null,\n      'ordinal': 2,\n      'content': '### Comprehensive Overview of TiDB\\n\\n#### What is TiDB?\\nTiDB is an open-source distributed SQL database designed to support Hybrid Transactional and Analytical Processing (HTAP) workloads. 
It is MySQL-compatible and offers horizontal scalability, strong consistency, and high availability[^1][^2]. TiDB aims to provide a one-stop solution for OLTP (Online Transactional Processing), OLAP (Online Analytical Processing), and HTAP services, making it suitable for various use cases that require high availability and strong consistency with large-scale data[^3].\\n\\n#### Architecture of TiDB\\nThe TiDB cluster consists of three main components[^4][^5]:\\n1. **TiDB Server**: Handles SQL parsing, query planning, and execution.\\n2. **TiKV Server**: Acts as the distributed key-value storage engine, storing the actual data.\\n3. **PD (Placement Driver) Server**: Manages cluster metadata, allocates timestamps, and handles data placement and load balancing.\\n\\nAdditionally, TiDB includes:\\n- **TiFlash**: A columnar storage engine for analytical workloads, providing high concurrency for `INSERT` and `UPDATE` operations without impacting OLTP performance[^6].\\n- **TiSpark**: A connector that enables Spark to access data stored in TiDB[^7].\\n- **TiDB Binlog**: A tool for capturing and replicating data changes[^8].\\n- **TiDB Lightning**: A high-performance tool for importing data into TiDB[^9].\\n\\n#### Key Features of TiDB\\n1. **Horizontal Scalability**: TiDB allows for easy horizontal scaling of both computing and storage resources, making it adaptable to changing workloads[^10]. The architecture separates computing from storage, enabling independent scaling[^11].\\n2. **High Availability**: TiDB ensures high availability through data replication and the Multi-Raft protocol, guaranteeing data integrity even in the event of failures[^12]. It supports automatic failover when a minority of replicas fail, making it transparent to applications[^13].\\n3. **HTAP Capabilities**: TiDB supports both row-based (TiKV) and columnar (TiFlash) storage engines, enabling real-time processing of both transactional and analytical workloads[^14].\\n4. 
**Cloud-Native Design**: TiDB is built for cloud environments, offering flexible scalability, reliability, and security on various cloud platforms[^15]. It integrates seamlessly with Kubernetes and offers a fully-managed service (TiDB Cloud)[^16].\\n5. **MySQL Compatibility**: TiDB is compatible with the MySQL 5.7 protocol and ecosystem, allowing for easy migration of applications with minimal code changes[^17]. However, it does not support triggers, stored procedures, and user-defined functions[^18].\\n\\n#### Ensuring High Availability, Scalability, and Performance\\n- **High Availability**: TiDB achieves high availability through its multi-replica architecture and the Multi-Raft protocol, which ensures that data is consistently replicated across multiple nodes[^19]. Transactions can only be committed when data has been successfully written into the majority of replicas[^20].\\n- **Scalability**: TiDB\\'s architecture allows for flexible and elastic scaling by separating computing from storage. This design enables users to scale out or scale in the computing or storage capacity online as needed[^21].\\n- **Performance**: TiDB provides high performance through various optimizations, including the use of TiFlash for analytical workloads and the DeltaTree structure for efficient data modification[^22]. The system also supports distributed transactions using a two-phase commit protocol with optimizations inspired by Google\\'s Percolator[^23].\\n\\n#### Compatibility with MySQL\\nTiDB supports most MySQL 5.7 syntax and features, making it highly compatible with MySQL applications[^24]. This compatibility allows users to migrate applications to TiDB without changing a single line of code in many cases[^25]. However, certain features like triggers, stored procedures, and user-defined functions are not supported[^26].\\n\\n### Conclusion\\nTiDB is a robust, scalable, and high-performance distributed SQL database designed for modern data workloads. 
Its architecture, key features, and compatibility with MySQL make it a versatile solution for various use cases, ensuring high availability, scalability, and performance.\\n\\n[^1]: [TiDB Overview | PingCAP Docs](https://docs.pingcap.com/tidb/stable/overview)\\n[^2]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^3]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq)\\n[^4]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-architecture)\\n[^5]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/tidb-faq)\\n[^6]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-faq)\\n[^7]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidbcloud/tidb-architecture)\\n[^8]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-architecture)\\n[^9]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-architecture)\\n[^10]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/overview)\\n[^11]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^12]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^13]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-architecture)\\n[^14]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/overview)\\n[^15]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^16]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/overview)\\n[^17]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^18]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-faq)\\n[^19]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v5.4/overview)\\n[^20]: [TiDB Key Features | 
PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/overview)\\n[^21]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/overview)\\n[^22]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-faq)\\n[^23]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v6.5/tidb-faq)\\n[^24]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-faq)\\n[^25]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-architecture)\\n[^26]: [TiDB Introduction and Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/tidb-faq)',\n      'error': null,\n      \"post_verification_result_url\": null,\n      'sources': [\n        {\n          'id': 8247,\n          'name': 'Overview',\n          'source_uri': 'https://docs.pingcap.com/tidb/v7.5/tidb-architecture',\n        }, {\n          'id': 8600,\n          'name': 'TiDB FAQs',\n          'source_uri': 'https://docs.pingcap.com/tidb/v7.5/tidb-faq',\n        }, {\n          'id': 9057,\n          'name': 'Overview',\n          'source_uri': 'https://docs.pingcap.com/tidbcloud/tidb-architecture',\n        }, {\n          'id': 9373,\n          'name': 'TiDB Introduction',\n          'source_uri': 'https://docs.pingcap.com/tidb/v7.1/overview',\n        }, {\n          'id': 9865,\n          'name': 'Overview',\n          'source_uri': 'https://docs.pingcap.com/tidb/v7.1/tidb-architecture',\n        }, {\n          'id': 10191,\n          'name': 'TiDB FAQs',\n          'source_uri': 'https://docs.pingcap.com/tidb/v7.1/tidb-faq',\n        }, {\n          'id': 10578,\n          'name': 'TiDB Introduction',\n          'source_uri': 'https://docs.pingcap.com/tidb/v6.5/overview',\n        }, {\n          'id': 11370,\n          'name': 'TiDB FAQs',\n          'source_uri': 'https://docs.pingcap.com/tidb/v6.5/tidb-faq',\n        }, {\n          'id': 12985,\n          'name': 'TiDB 
Introduction',\n          'source_uri': 'https://docs.pingcap.com/tidb/v5.4/overview',\n        },\n      ],\n      'chat_id': '019091e3-5cfc-74a3-b5e0-653a73f52af2',\n    },\n  } as any));\n\n  if (!result.success) {\n    throw result.error;\n  }\n});"
  },
  {
    "path": "frontend/app/src/components/chat/chats-history.tsx",
    "content": "import { type Chat, deleteChat, listChats } from '@/api/chats';\nimport { useAuth } from '@/components/auth/AuthProvider';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { Button } from '@/components/ui/button';\nimport { SidebarMenuSkeleton } from '@/components/ui/sidebar';\nimport { cn } from '@/lib/utils';\nimport { TrashIcon } from 'lucide-react';\nimport { usePathname } from 'next/navigation';\nimport { useEffect } from 'react';\nimport useSWR from 'swr';\n\nexport function ChatsHistory () {\n  const pathname = usePathname();\n  const auth = useAuth();\n  const user = auth.me;\n  const { data: history, mutate, isLoading, isValidating } = useSWR('api.chats.list?size=8', () => listChats({ size: 8 }), {\n    revalidateOnMount: false,\n    keepPreviousData: true,\n  });\n\n  useEffect(() => {\n    void mutate(() => undefined, { revalidate: true });\n  }, [user?.id]);\n\n  const isActive = (chat: Chat) => pathname === `/c/${chat.id}`;\n\n  return (\n    <div style={{ paddingLeft: 25 }}>\n      {isLoading && (\n        <>\n          <SidebarMenuSkeleton />\n          <SidebarMenuSkeleton />\n          <SidebarMenuSkeleton />\n        </>\n      )}\n      <ul className={cn('w-full overflow-hidden space-y-1 transition-opacity', isValidating && 'opacity-50')}>\n        {history?.items.map(chat => (\n          <li key={chat.id} className=\"flex gap-2 items-center\">\n            <NextLink href={`/c/${chat.id}`} data-active={isActive(chat) ? 'true' : undefined} variant={isActive(chat) ? 
'secondary' : 'ghost'} className=\"flex-1 opacity-80 text-xs p-2 py-1.5 h-max font-light w-[86%] block whitespace-nowrap overflow-hidden overflow-ellipsis data-[active]:font-semibold transition-opacity text-left ellipsis\">\n              {chat.title}\n            </NextLink>\n            <DangerousActionButton\n              asChild\n              action={async () => {\n                await deleteChat(chat.id).finally(() => mutate(history => history, { revalidate: true }));\n              }}\n              dialogTitle={`Are you sure to delete ${chat.title}?`}\n              dialogDescription=\"This action cannot be undone.\"\n            >\n              <Button className=\"flex-shrink-0 w-max h-max rounded-full p-1 hover:bg-transparent\" size=\"icon\" variant=\"ghost\">\n                <TrashIcon className=\"w-3 h-3 opacity-20 hover:opacity-60\" />\n              </Button>\n            </DangerousActionButton>\n          </li>\n        ))}\n      </ul>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/chats-table.tsx",
    "content": "'use client';\n\nimport { type Chat, deleteChat, listChats } from '@/api/chats';\nimport { actions } from '@/components/cells/actions';\nimport { datetime } from '@/components/cells/datetime';\nimport { link } from '@/components/cells/link';\nimport { metadataCell } from '@/components/cells/metadata';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { Trash2Icon } from 'lucide-react';\n\nexport function ChatsTable () {\n  return (\n    <DataTableRemote\n      idColumn=\"id\"\n      apiKey=\"api.chats.list\"\n      api={listChats}\n      columns={columns as any}\n    />\n  );\n}\n\nconst helper = createColumnHelper<Chat>();\n\nconst columns = [\n  helper.accessor('title', {\n    cell: link({ url: chat => `/c/${chat.id}` }),\n  }),\n  helper.accessor('origin', {}),\n  helper.accessor('created_at', { cell: datetime }),\n  helper.accessor('engine_id', {}),\n  helper.accessor('engine_options', { cell: metadataCell }),\n  helper.display({\n    header: 'Operations',\n    cell: actions(chat => [\n      {\n        key: 'delete',\n        title: 'Delete',\n        icon: <Trash2Icon className=\"size-3\" />,\n        dangerous: {\n          dialogTitle: 'Are you sure to delete this chat?',\n          dialogDescription: 'This action cannot be undone.',\n        },\n        action: async ({ table }) => {\n          await deleteChat(chat.id);\n          table.reload?.();\n        },\n      },\n    ]),\n  }),\n];\n"
  },
  {
    "path": "frontend/app/src/components/chat/conversation-message-groups.scss",
    "content": "@keyframes highlight {\n  from {\n    @apply bg-amber-500/10;\n  }\n  to {\n    @apply bg-transparent;\n  }\n}\n\n.animate-highlight {\n  animation-name: highlight;\n  animation-delay: 2s;\n  animation-duration: 1s;\n  animation-timing-function: ease;\n  animation-fill-mode: both;\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/conversation-message-groups.tsx",
    "content": "import { type ChatMessageGroup, useChatInfo, useChatPostState, useCurrentChatController } from '@/components/chat/chat-hooks';\nimport { DebugInfo } from '@/components/chat/debug-info';\nimport { MessageAnnotationHistory } from '@/components/chat/message-annotation-history';\nimport { StackVMMessageAnnotationHistory } from '@/components/chat/message-annotation-history-stackvm';\nimport { MessageAnswer } from '@/components/chat/message-answer';\nimport { MessageAutoScroll } from '@/components/chat/message-auto-scroll';\nimport { MessageContextSources } from '@/components/chat/message-content-sources';\nimport { MessageError } from '@/components/chat/message-error';\nimport { MessageOperations } from '@/components/chat/message-operations';\nimport { MessageRecommendQuestions } from '@/components/chat/message-recommend-questions';\nimport { MessageSection } from '@/components/chat/message-section';\nimport { Button } from '@/components/ui/button';\nimport { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';\nimport { MessageVerify } from '@/experimental/chat-verify-service/message-verify';\nimport { cn } from '@/lib/utils';\nimport { InfoIcon } from 'lucide-react';\nimport { useEffect, useState } from 'react';\nimport './conversation-message-groups.scss';\n\nconst isWidgetEnv = !!process.env.NEXT_PUBLIC_IS_WIDGET;\n\nexport function ConversationMessageGroups ({ groups }: { groups: ChatMessageGroup[] }) {\n  const controller = useCurrentChatController();\n  const { params, initialized } = useChatPostState(useCurrentChatController());\n\n  useEffect(() => {\n    if (!isWidgetEnv) {\n      const scroll = () => {\n        setTimeout(() => {\n          window.scrollTo({\n            left: 0,\n            top: document.body.scrollHeight,\n            behavior: 'smooth',\n          });\n        }, 100);\n      };\n\n      controller\n        .on('post', scroll)\n        .on('post-initialized', scroll);\n\n      return () 
=> {\n        controller\n          .off('post', scroll)\n          .off('post-initialized', scroll);\n      };\n    }\n  }, [controller]);\n\n  return (\n    <div className=\"space-y-8 pb-16\">\n      {groups.map((group, index) => (\n        <ConversationMessageGroup\n          key={group.user.id}\n          group={group}\n          isLastGroup={index === groups.length - 1}\n        />\n      ))}\n      {!!params && !initialized && (\n        <section\n          className={cn('opacity-50 pointer-events-none space-y-6 p-4 pt-12 border-b pb-10 last-of-type:border-b-0 last-of-type:border-pb-4')}\n        >\n          <div className=\"relative pr-12\">\n            <h2 className=\"text-2xl font-normal\">{params.content}</h2>\n          </div>\n        </section>\n      )}\n    </div>\n  );\n}\n\nfunction ConversationMessageGroup ({ group, isLastGroup }: { group: ChatMessageGroup, isLastGroup: boolean }) {\n  const enableDebug = /* !!me && */ !process.env.NEXT_PUBLIC_DISABLE_DEBUG_PANEL;\n  const { engine_options } = useChatInfo(useCurrentChatController()) ?? 
{};\n\n  const { params } = useChatPostState(useCurrentChatController());\n\n  const [debugInfoOpen, setDebugInfoOpen] = useState(false);\n  const [highlight, setHighlight] = useState(false);\n  useEffect(() => {\n    if (group.assistant && location.hash.slice(1) === String(group.assistant.id)) {\n      setHighlight(true);\n      document.getElementById(String(group.assistant.id))?.scrollIntoView({ behavior: 'instant', block: 'start' });\n    }\n  }, []);\n\n  return (\n    <section\n      id={group.assistant && String(group.assistant.id)}\n      className={cn('space-y-6 p-4 pt-12 border-b pb-10 last-of-type:border-b-0 last-of-type:border-pb-4', highlight && 'animate-highlight')}\n      onAnimationEnd={() => setHighlight(false)}\n    >\n      <Collapsible open={debugInfoOpen} onOpenChange={setDebugInfoOpen}>\n        <div className=\"relative pr-12\">\n          <h2 className=\"text-2xl font-normal whitespace-pre-wrap\">{group.user.content}</h2>\n          {enableDebug && <CollapsibleTrigger asChild>\n            <Button className=\"absolute right-0 top-0 z-0 rounded-full\" variant=\"ghost\" size=\"sm\">\n              <InfoIcon className=\"h-4 w-4\" />\n              <span className=\"sr-only\">Toggle</span>\n            </Button>\n          </CollapsibleTrigger>}\n        </div>\n        <CollapsibleContent>\n          <DebugInfo group={group} />\n        </CollapsibleContent>\n      </Collapsible>\n\n      {group.assistant?.version === 'Legacy' && <MessageAnnotationHistory message={group.assistant} />}\n      {group.assistant?.version === 'StackVM' && <StackVMMessageAnnotationHistory message={group.assistant} />}\n\n      {!engine_options?.hide_sources && <MessageSection className=\"!mt-1\" message={group.assistant}>\n        <MessageContextSources message={group.assistant} />\n      </MessageSection>}\n\n      <MessageSection className=\"space-y-2\" message={group.assistant}>\n        <MessageAnswer message={group.assistant} 
showBetaAlert={group.hasFirstAssistantMessage} />\n        {group.assistant && <MessageAutoScroll message={group.assistant} />}\n      </MessageSection>\n\n      {group.assistant && <MessageError message={group.assistant} />}\n\n      {group.assistant && <MessageOperations message={group.assistant} />}\n\n      <MessageVerify assistant={group.assistant} />\n\n      {/* Only show recommend questions if enabled in engine_options */}\n      {!params && isLastGroup && group.hasLastAssistantMessage && engine_options?.further_questions !== false && (\n        <MessageRecommendQuestions assistant={group.assistant} />\n      )}\n    </section>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/conversation.test.tsx",
    "content": "describe('Conversation component', () => {\n  test('button should be disabled when input is empty', () => {\n    // Create a simple test scenario without the full component complexity\n    const textarea = document.createElement('textarea');\n    const button = document.createElement('button');\n    \n    // Simulate the input validation logic\n    const validateInput = (value: string) => {\n      return !value.trim();\n    };\n\n    // Test empty input\n    textarea.value = '';\n    button.disabled = validateInput(textarea.value);\n    expect(button.disabled).toBe(true);\n\n    // Test whitespace only\n    textarea.value = ' ';\n    button.disabled = validateInput(textarea.value);\n    expect(button.disabled).toBe(true);\n\n    // Test whitespace with tab\n    textarea.value = ' \\t';\n    button.disabled = validateInput(textarea.value);\n    expect(button.disabled).toBe(true);\n\n    // Test with actual content\n    textarea.value = 'foo';\n    button.disabled = validateInput(textarea.value);\n    expect(button.disabled).toBe(false);\n  });\n});\n"
  },
  {
    "path": "frontend/app/src/components/chat/conversation.tsx",
    "content": "'use client';\n\nimport type { Chat, ChatMessage } from '@/api/chats';\nimport type { ChatController } from '@/components/chat/chat-controller';\nimport { ChatControllerProvider, useChatController, useChatMessageControllers, useChatMessageGroups, useChatPostState } from '@/components/chat/chat-hooks';\nimport { ConversationMessageGroups } from '@/components/chat/conversation-message-groups';\nimport { MessageInput } from '@/components/chat/message-input';\nimport { SecuritySettingContext, withReCaptcha } from '@/components/security-setting-provider';\nimport { useSize } from '@/components/use-size';\nimport { cn } from '@/lib/utils';\nimport { type ChangeEvent, type FormEvent, type ReactNode, type Ref, useContext, useImperativeHandle, useState } from 'react';\n\nexport interface ConversationProps {\n  chatId?: string;\n\n  className?: string;\n  open: boolean;\n  chat: Chat | undefined;\n  history: ChatMessage[];\n\n  /* Only for widgets */\n  placeholder?: (controller: ChatController, postState: ReturnType<typeof useChatPostState>) => ReactNode;\n  preventMutateBrowserHistory?: boolean;\n  preventShiftMessageInput?: boolean;\n  newChatRef?: Ref<ChatController['post'] | undefined>;\n}\n\nexport function Conversation ({ open, chat, chatId, history, placeholder, preventMutateBrowserHistory = false, preventShiftMessageInput = false, newChatRef, className }: ConversationProps) {\n  const [inputElement, setInputElement] = useState<HTMLTextAreaElement | null>(null);\n\n  const controller = useChatController(chatId, chat, history, inputElement);\n  const postState = useChatPostState(controller);\n  const groups = useChatMessageGroups(useChatMessageControllers(controller));\n\n  const [input, setInput] = useState('');\n  const handleInputChange = (e: ChangeEvent<HTMLTextAreaElement>) => {\n    setInput(e.target.value);\n  };\n\n  const { ref, size } = useSize();\n\n  const security = useContext(SecuritySettingContext);\n\n  const submitWithReCaptcha = async 
(e: FormEvent<HTMLFormElement>) => {\n    e.preventDefault();\n    withReCaptcha({\n      action: 'chat',\n      siteKey: security?.google_recaptcha_site_key || '',\n      mode: security?.google_recaptcha,\n    }, ({ token, action }) => {\n      controller.post({\n        content: input,\n        headers: {\n          'X-Recaptcha-Token': token,\n          'X-Recaptcha-Action': action,\n        },\n      });\n      setInput('');\n    });\n  };\n\n  const disabled = !!postState.params;\n  const actionDisabled = disabled || !input.trim();\n\n  useImperativeHandle(newChatRef, () => {\n    return controller.post.bind(controller);\n  }, [controller]);\n\n  return (\n    <ChatControllerProvider controller={controller}>\n      {!postState.params && !groups.length && placeholder?.(controller, postState)}\n      <div ref={ref} className={cn(\n        'mx-auto space-y-4 transition-all relative md:max-w-screen-md md:min-h-screen md:p-body',\n        className,\n      )}>\n        <ConversationMessageGroups groups={groups} />\n        <div className=\"h-24\"></div>\n      </div>\n      {size && open && <form className={cn('block h-max p-4 fixed bottom-0', preventShiftMessageInput && 'absolute pb-0')} onSubmit={submitWithReCaptcha} style={{ left: (preventShiftMessageInput ? 0 : size.x) + 16, width: size.width - 32 }}>\n        <MessageInput inputRef={setInputElement} className=\"w-full transition-all\" disabled={disabled} actionDisabled={actionDisabled} inputProps={{ value: input, onChange: handleInputChange, disabled }} />\n      </form>}\n    </ChatControllerProvider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/debug-info.tsx",
    "content": "import { type ChatMessageGroup, useChatInfo, useChatMessageField, useCurrentChatController } from '@/components/chat/chat-hooks';\nimport { KnowledgeGraphDebugInfo } from '@/components/chat/knowledge-graph-debug-info';\nimport { DateFormat } from '@/components/date-format';\nimport { OptionDetail } from '@/components/option-detail';\n// import { MessageLangfuse } from '@/components/chat/message-langfuse';\nimport { differenceInSeconds } from 'date-fns';\nimport { WorkflowIcon } from 'lucide-react';\nimport { useMemo } from 'react';\n\nexport interface DebugInfoProps {\n  group: ChatMessageGroup;\n}\n\nexport function DebugInfo ({ group }: DebugInfoProps) {\n  const chat = useChatInfo(useCurrentChatController());\n  const traceURL = useChatMessageField(group.assistant, 'trace_url');\n  const createdAt = useChatMessageField(group.assistant, 'created_at');\n  const finishedAt = useChatMessageField(group.assistant, 'finished_at');\n\n  const stackVMUrl = useMemo(() => {\n    if (traceURL) {\n      try {\n        const url = new URL(traceURL);\n        if (url.host === 'stackvm.tidb.ai') {\n          const id = url.searchParams.get('task_id');\n          return `https://stackvm-ui.vercel.app/tasks/${id}`;\n        }\n      } catch {\n        return undefined;\n      }\n    }\n  }, [traceURL]);\n\n  return (\n    <div className=\"my-2 p-4 space-y-4 bg-card border rounded text-xs\">\n      {traceURL && <div className=\"flex items-center gap-4 text-xs flex-wrap\">\n        <a className=\"underline\" target=\"_blank\" href={stackVMUrl ?? 
traceURL}>\n          <WorkflowIcon className=\"inline w-3 h-3 mr-1\" />\n          Tracing URL\n        </a>\n      </div>}\n      {/*<MessageLangfuse group={group} />*/}\n      <KnowledgeGraphDebugInfo group={group} />\n      {chat && (\n        <section className=\"space-y-2\">\n          <div className=\"space-y-2 text-sm\">\n            <div className=\"space-y-2 text-sm\">\n              <OptionDetail title=\"Origin\" value={chat.origin} />\n              <OptionDetail title=\"Chat Created At\" value={<DateFormat date={chat.created_at} />} />\n              <OptionDetail title=\"Message Created At\" value={<DateFormat date={createdAt} />} />\n              <OptionDetail title=\"Message Finished In\" value={(createdAt && finishedAt) && `${differenceInSeconds(finishedAt, createdAt)} seconds`} />\n              <OptionDetail title=\"Chat Engine\" value={chat.engine_id} />\n            </div>\n          </div>\n        </section>\n      )}\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/knowledge-graph-debug-info.tsx",
    "content": "import { getChatMessageSubgraph } from '@/api/chats';\nimport { useAuth } from '@/components/auth/AuthProvider';\nimport { type ChatMessageGroup, useChatInfo, useChatMessageStreamState, useCurrentChatController } from '@/components/chat/chat-hooks';\nimport type { OngoingState } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState, type StackVMState } from '@/components/chat/chat-stream-state';\nimport { NetworkViewer } from '@/components/graph/components/NetworkViewer';\nimport { useNetwork } from '@/components/graph/useNetwork';\nimport { PencilIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { useEffect } from 'react';\nimport useSWR from 'swr';\n\nexport function KnowledgeGraphDebugInfo ({ group }: { group: ChatMessageGroup }) {\n  const { engine_options } = useChatInfo(useCurrentChatController()) ?? {};\n  const auth = useAuth();\n  const ongoing = useChatMessageStreamState(group.assistant);\n  const kbLinked = engine_options?.knowledge_base?.linked_knowledge_bases;\n  const canEdit = !!auth.me?.is_superuser && kbLinked;\n\n  const shouldFetch = (!ongoing || ongoing.finished || couldFetchKnowledgeGraphDebugInfo(ongoing));\n  const { data: span, isLoading, mutate, error } = useSWR(\n    shouldFetch && `api.chats.get-message-subgraph?id=${group.user.id}`,\n    () => getChatMessageSubgraph(group.user.id),\n    {\n      revalidateOnReconnect: false,\n      revalidateOnFocus: false,\n      revalidateOnMount: false,\n    },\n  );\n\n  useEffect(() => {\n    if (shouldFetch && !error && !isLoading && !span) {\n      mutate(undefined, true);\n    }\n  }, [span, isLoading, error, shouldFetch]);\n\n  const network = useNetwork(span);\n\n  return (\n    <NetworkViewer\n      className=\"my-2 border rounded w-full aspect-video\"\n      loading={!shouldFetch || isLoading}\n      loadingTitle={shouldFetch ? 'Loading knowledge graph...' 
: 'Waiting knowledge graph request...'}\n      network={network}\n      Details={\n        ({ target, network }) => {\n          if (!canEdit) return null;\n\n          if (!kbLinked) return null;\n\n          if (kbLinked.length === 1) {\n            return (\n              <Link href={`/knowledge-bases/${kbLinked[0].id}/knowledge-graph-explorer?query=${encodeURIComponent(`message-subgraph:${group.user.id}`)}`} className=\"absolute top-2 right-2 text-xs underline\">\n                <PencilIcon className=\"w-3 h-3 mr-1 inline-block\" />\n                Edit graph\n              </Link>\n            );\n          }\n\n          const placeholder = <span className=\"text-muted-foreground absolute top-2 right-2 text-xs underline cursor-not-allowed\">\n            <PencilIcon className=\"w-3 h-3 mr-1 inline-block\" />\n            Edit graph\n          </span>;\n\n          if (!target) return placeholder;\n\n          if (target.type === 'node') {\n            const node = network.node(target.id);\n            if (!node?.knowledge_base_id) return placeholder;\n            return (\n              <Link href={`/knowledge-bases/${node.knowledge_base_id}/knowledge-graph-explorer?query=${encodeURIComponent(`message-subgraph:${group.user.id}`)}`} className=\"absolute top-2 right-2 text-xs underline\">\n                <PencilIcon className=\"w-3 h-3 mr-1 inline-block\" />\n                Edit graph\n              </Link>\n            );\n          } else if (target.type === 'link') {\n            const link = network.node(target.id);\n            if (!link?.knowledge_base_id) return placeholder;\n            return (\n              <Link href={`/knowledge-bases/${link.knowledge_base_id}/knowledge-graph-explorer?query=${encodeURIComponent(`message-subgraph:${group.user.id}`)}`} className=\"absolute top-2 right-2 text-xs underline\">\n                <PencilIcon className=\"w-3 h-3 mr-1 inline-block\" />\n                Edit graph\n              </Link>\n            );\n  
        }\n\n          return placeholder;\n        }\n      }\n    />\n  );\n}\n\nfunction couldFetchKnowledgeGraphDebugInfo (state: OngoingState<AppChatStreamState | StackVMState>) {\n  switch (state.state) {\n    case AppChatStreamState.GENERATE_ANSWER:\n    case AppChatStreamState.FINISHED:\n    case AppChatStreamState.RERANKING:\n    case AppChatStreamState.SOURCE_NODES:\n      return true;\n    default:\n      return false;\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-annotation-history-stackvm.tsx",
    "content": "import { useChatMessageField, useChatMessageStreamHistoryStates, useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport { type OngoingState, type OngoingStateHistoryItem, StackVMChatMessageController } from '@/components/chat/chat-message-controller';\nimport type { StackVMState, StackVMToolCall } from '@/components/chat/chat-stream-state';\nimport { isNotFinished } from '@/components/chat/utils';\nimport { DiffSeconds } from '@/components/diff-seconds';\nimport { RemarkContent } from '@/components/remark-content';\nimport { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';\nimport { StackVM } from '@/lib/stackvm';\nimport { cn } from '@/lib/utils';\nimport { motion, type Target } from 'framer-motion';\nimport { CheckCircleIcon, ChevronUpIcon, ClockIcon, InfoIcon, Loader2Icon, SearchIcon } from 'lucide-react';\nimport { useEffect, useMemo, useState } from 'react';\n\nexport function StackVMMessageAnnotationHistory ({ message }: { message: StackVMChatMessageController | undefined }) {\n  const [show, setShow] = useState(true);\n  const history = useChatMessageStreamHistoryStates(message);\n  const current = useChatMessageStreamState(message);\n  const error = useChatMessageField(message, 'error');\n  const traceUrl = useChatMessageField(message, 'trace_url');\n\n  const finished = !isNotFinished(current) || !!error;\n\n  const stackVMTaskUrl = useMemo(() => {\n    if (traceUrl) {\n      return traceUrl;\n    }\n    let id: string | undefined;\n    if (current) {\n      id = current.state.task_id;\n    } else {\n      id = history?.[0]?.state.state.task_id;\n    }\n    if (id) {\n      return `https://stackvm.tidb.ai/?task_id=${id}`;\n    }\n    return undefined;\n  }, [traceUrl, history, current]);\n\n  const stackVmUI = useMemo(() => {\n    if (!stackVMTaskUrl) {\n      return undefined;\n    }\n    try {\n      const url = new URL(stackVMTaskUrl);\n\n      const taskId = url.searchParams.get('task_id');\n\n 
     if (!taskId) {\n        return stackVMTaskUrl;\n      }\n\n      return `https://stackvm-ui.vercel.app/tasks/${taskId}`;\n    } catch {\n      return stackVMTaskUrl;\n    }\n  }, [stackVMTaskUrl]);\n\n  useEffect(() => {\n    if (finished) {\n      const handler = setTimeout(() => {\n        setShow(false);\n      }, 2000);\n      return () => {\n        clearTimeout(handler);\n      };\n    }\n  }, [finished]);\n\n  if (!history) {\n    return <div className=\"!mt-5\" />;\n  }\n\n  return (\n    <div className=\"!mt-1\">\n      <motion.div\n        animate={show ? { height: 'auto', opacity: 1, scale: 1, pointerEvents: 'auto' } : { height: 0, opacity: 0, scale: 0.3, pointerEvents: 'none' }}\n        style={{\n          transformOrigin: 'left top',\n        }}\n      >\n        <ol\n          className=\"text-sm mt-4\"\n        >\n          {history?.map((item, index, history) => (\n            <MessageAnnotationHistoryItem key={index} index={index} history={history} item={item} />\n          ))}\n          {error && <MessageAnnotationHistoryError history={history} error={error} />}\n          {current && !current.finished && <MessageAnnotationCurrent history={history} current={current} />}\n        </ol>\n        {stackVmUI && <div className=\"mt-2 text-xs\">\n          Visit <a className=\"underline\" target=\"_blank\" href={stackVmUI}>StackVM</a> to see more details\n        </div>}\n        <button className=\"flex items-center gap-1 text-xs text-muted-foreground hover:text-foreground transition-colors\" onClick={() => setShow(false)}>\n          <ChevronUpIcon className=\"size-4 mr-1\" />\n          Collapse\n        </button>\n      </motion.div>\n      <motion.button\n        onClick={() => setShow(true)}\n        className=\"flex items-center gap-1 text-xs text-muted-foreground hover:text-foreground transition-colors\"\n        animate={show ? 
{ height: 0, opacity: 0, overflow: 'visible', pointerEvents: 'none', scale: 0.5 } : { height: 'auto', opacity: 1, scale: 1, pointerEvents: 'auto' }}\n      >\n        <ClockIcon className=\"size-3\" />\n        {error ? 'Not finished' : <DiffSeconds from={message?.message?.created_at} to={message?.message?.finished_at} />}\n      </motion.button>\n    </div>\n  );\n}\n\nconst CheckedCircle = motion(CheckCircleIcon);\nconst InformationCircle = motion(InfoIcon);\n\nconst itemInitial: Target = { opacity: 0.5 };\nconst itemAnimate: Target = { opacity: 1 };\n\nconst itemIconInitial: Target = { color: 'rgb(113 113 122 / 50)' };\nconst itemSuccessIconAnimate: Target = { color: 'rgb(34 197 94)' };\nconst itemErrorIconAnimate: Target = { color: 'rgb(239 68 68)' };\n\nfunction StackVMCheckpoint ({ state, pc }: { state: StackVMState, pc: boolean }) {\n  const step = useMemo(() => {\n    return state.state.plan.steps.find(step => step.id === `step:${pc ? state.state.program_counter : state.seq_no}`);\n  }, [state.state, state.seq_no, pc]);\n\n  if (!step) {\n    return null;\n  }\n\n  switch (step.type) {\n    case 'reasoning':\n      return 'Thoughts';\n    case 'assign':\n      return `Assign Variables`;\n    case 'calling':\n      return `Tool Call`;\n    case 'jmp':\n      return `Jump`;\n  }\n}\n\nfunction StackVMDetails ({ pc, state }: { state: StackVMState, pc: boolean }) {\n  const step = useMemo(() => {\n    return state.state.plan.steps.find(step => step.id === `step:${pc ? 
state.state.program_counter : state.seq_no}`);\n  }, [state.state, state.seq_no, pc]);\n\n  if (!step) {\n    return null;\n  }\n\n  switch (step.type) {\n    case 'reasoning':\n      return <RemarkContent className=\"ml-2 pl-4 text-muted-foreground text-xs border-l border-l-green-500/50 pt-1 prose-strong:text-muted-foreground\">{(step as StackVM.model.StepModel<'reasoning'>).parameters.chain_of_thoughts}</RemarkContent>;\n    case 'calling':\n      return (\n        <div className=\"ml-2 pl-4 text-muted-foreground text-xs border-l border-l-green-500/50 pt-1\">\n          <ToolCallInfo step={step as StackVM.model.StepModel<'calling'>} vars={state.state.variables??{}} pc={state.state.program_counter} toolCalls={state.toolCalls} />\n        </div>\n      );\n    case 'assign':\n      return (\n        <div className=\"ml-2 pl-4 text-muted-foreground text-xs border-l border-l-green-500/50 pt-1\">\n          <ul className=\"space-y-1 block\">\n            {Object.entries(step.parameters).map(([key, value]) => (\n              <li key={key} className=\"flex gap-2 items-center\">\n                <div><code>{key}</code>:</div>\n                <JsonValueViewer value={value} />\n              </li>\n            ))}\n          </ul>\n        </div>\n      );\n    default:\n      return null;\n  }\n}\n\nfunction ToolCallInfo ({ vars, step, pc, toolCalls }: { step: StackVM.model.StepModel<'calling'>, vars: Record<string, unknown>, pc: number | undefined, toolCalls: StackVMToolCall[] }) {\n  const result = toolCalls.find(tc => tc.toolCallId === `${pc}`)?.result;\n\n  return (\n    <div className=\"space-y-1\">\n      <div>\n        <b><code>{step.parameters.tool_name}</code></b>\n        {' '}\n        <span>(</span>\n      </div>\n      <ul className=\"space-y-1\">\n        {Object.entries(step.parameters.tool_params).map(([key, value]) => (\n          <li key={key} className=\"ml-2 flex gap-2 items-center\">\n            <div><code>{key}</code>:</div>\n            
<JsonValueViewer value={value} />\n          </li>\n        ))}\n      </ul>\n      <span>)</span>\n      {result != null && <>\n        <div className=\"flex items-center gap-2\">\n          <b>Result:</b>\n          <span>{'{'}</span>\n        </div>\n        <ul className=\"space-y-1 block\">\n          {step.output_vars.map((binding) => (\n            <li key={binding.parameter ?? ''} className=\"ml-2 flex gap-2 items-center\">\n              <div><code>{binding.parameter}</code>:</div>\n              <JsonValueViewer value={result} />\n            </li>\n          ))}\n        </ul>\n        <div>{'}'}</div>\n      </>}\n    </div>\n  );\n}\n\nfunction JsonValueViewer ({ value }: { value: unknown }) {\n  if (value == null) {\n    return String(value);\n  }\n\n  if (typeof value === 'object') {\n    let label: string;\n    if (value instanceof Array) {\n      label = `array<${value.length} items>`;\n    } else {\n      label = `object<${Object.keys(value).length} entries>`;\n    }\n\n    return (\n      <Popover>\n        <PopoverTrigger className=\"inline-flex items-center\">\n          <SearchIcon className=\"size-3 mr-1\" />\n          {label}\n        </PopoverTrigger>\n        <PopoverContent className=\"max-w-[320px] max-h-[30vh] overflow-y-auto overflow-x-hidden\">\n          <pre className=\"text-xs w-full whitespace-pre-wrap\">\n            {JSON.stringify(value, undefined, 2)}\n          </pre>\n        </PopoverContent>\n      </Popover>\n    );\n  } else {\n    const isText = typeof value === 'string';\n    const string = String(value);\n    if (string.length > 25) {\n      return (\n        <Popover>\n          <PopoverTrigger className={cn('inline-flex items-center')}>\n            <SearchIcon className=\"size-3 mr-1 text-muted-foreground\" />\n            {isText && <span>{'\"'}</span>}{string.slice(0, 25) + '...'}{isText && <span>{'\"'}</span>}\n          </PopoverTrigger>\n          <PopoverContent className=\"max-w-[320px] max-h-[30vh] 
overflow-y-auto overflow-x-hidden\">\n            <pre className=\"text-xs w-full whitespace-pre-wrap\">\n              {string}\n            </pre>\n          </PopoverContent>\n        </Popover>\n      );\n    } else {\n      return (\n        <span className={cn()}>\n          {isText && <span>{'\"'}</span>}{string}{isText && <span>{'\"'}</span>}\n        </span>\n      );\n    }\n  }\n}\n\nfunction MessageAnnotationHistoryItem ({ history, item: { state, time }, index }: { history: OngoingStateHistoryItem<StackVMState>[], index: number, item: OngoingStateHistoryItem<StackVMState> }) {\n  return (\n    <motion.li className=\"relative mb-2\" initial={itemInitial} animate={itemAnimate}>\n      {index > 1 && <span className=\"absolute left-2 bg-green-500/50 h-2\" style={{ width: 1, top: -8 }} />}\n      <div className=\"flex gap-2 items-center\">\n        <CheckedCircle className=\"size-4\" initial={itemIconInitial} animate={itemSuccessIconAnimate} />\n        <span>{state.display === '[deprecated]' ? 
<StackVMCheckpoint state={state.state} pc={false} /> : state.display}</span>\n      </div>\n      <StackVMDetails state={state.state} pc={false} />\n    </motion.li>\n  );\n}\n\nfunction MessageAnnotationHistoryError ({ history, error }: { history: OngoingStateHistoryItem<StackVMState>[], error: string }) {\n  return (\n    <motion.li className=\"relative mb-2\" initial={itemInitial} animate={itemAnimate}>\n      {history.length > 0 && <span className=\"absolute left-2 bg-muted-foreground h-2\" style={{ width: 1, top: -8 }} />}\n      <div className=\"flex gap-2 items-center\">\n        <InformationCircle className=\"size-4\" initial={itemIconInitial} animate={itemErrorIconAnimate} />\n        <span>{error}</span>\n      </div>\n    </motion.li>\n  );\n}\n\nfunction MessageAnnotationCurrent ({ history, current }: { history: OngoingStateHistoryItem<StackVMState>[], current: OngoingState<StackVMState> }) {\n  return (\n    <motion.li\n      key={current.state?.state.program_counter}\n      className=\"relative\"\n      initial={{\n        opacity: 0,\n        height: 0,\n        x: -40,\n      }}\n      animate={{\n        opacity: 0.5,\n        height: 'auto',\n        x: 0,\n      }}\n    >\n      <div className=\"flex gap-2 items-center\">\n        {(history?.length ?? 0) > 1 && <span className=\"absolute left-2 h-2 bg-zinc-500/50\" style={{ width: 1, top: -8 }} />}\n        <Loader2Icon className=\"size-4 animate-spin repeat-infinite text-muted-foreground\" />\n        <span>{current.display === '[deprecated]' ? <StackVMCheckpoint state={current.state} pc /> : current.display}</span>\n      </div>\n      <StackVMDetails state={current.state} pc />\n    </motion.li>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-annotation-history.tsx",
    "content": "import { useChatMessageField, useChatMessageStreamHistoryStates, useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport { LegacyChatMessageController, type OngoingState, type OngoingStateHistoryItem } from '@/components/chat/chat-message-controller';\nimport { isNotFinished } from '@/components/chat/utils';\nimport { DiffSeconds } from '@/components/diff-seconds';\nimport { motion, type Target } from 'framer-motion';\nimport { CheckCircleIcon, ChevronUpIcon, ClockIcon, InfoIcon, Loader2Icon } from 'lucide-react';\nimport { useEffect, useState } from 'react';\n\nexport function MessageAnnotationHistory ({ message }: { message: LegacyChatMessageController | undefined }) {\n  const [show, setShow] = useState(true);\n  const history = useChatMessageStreamHistoryStates(message);\n  const current = useChatMessageStreamState(message);\n  const error = useChatMessageField(message, 'error');\n\n  const finished = !isNotFinished(current) || !!error;\n\n  useEffect(() => {\n    if (finished) {\n      const handler = setTimeout(() => {\n        setShow(false);\n      }, 2000);\n      return () => {\n        clearTimeout(handler);\n      };\n    }\n  }, [finished]);\n\n  if (!history) {\n    return <div className=\"!mt-5\" />;\n  }\n\n  return (\n    <div className=\"!mt-1\">\n      <motion.div\n        animate={show ? 
{ height: 'auto', opacity: 1, scale: 1, pointerEvents: 'auto' } : { height: 0, opacity: 0, scale: 0.3, pointerEvents: 'none' }}\n        style={{\n          transformOrigin: 'left top',\n        }}\n      >\n        <ol\n          className=\"text-sm mt-4\"\n        >\n          {history?.map((item, index, history) => (\n            index > 0 && <MessageAnnotationHistoryItem key={index} index={index} history={history} item={item} />\n          ))}\n          {error && <MessageAnnotationHistoryError history={history} error={error} />}\n          {current && !current.finished && <MessageAnnotationCurrent history={history} current={current} />}\n        </ol>\n        <button className=\"flex items-center gap-1 text-xs text-muted-foreground hover:text-foreground transition-colors\" onClick={() => setShow(false)}>\n          <ChevronUpIcon className=\"size-4 mr-1\" />\n          Collapse\n        </button>\n      </motion.div>\n      <motion.button\n        onClick={() => setShow(true)}\n        className=\"flex items-center gap-1 text-xs text-muted-foreground hover:text-foreground transition-colors\"\n        animate={show ? { height: 0, opacity: 0, overflow: 'visible', pointerEvents: 'none', scale: 0.5 } : { height: 'auto', opacity: 1, scale: 1, pointerEvents: 'auto' }}\n      >\n        <ClockIcon className=\"size-3\" />\n        {error ? 
'Not finished' : <DiffSeconds from={message?.message?.created_at} to={message?.message?.finished_at} />}\n      </motion.button>\n    </div>\n  );\n}\n\nconst CheckedCircle = motion(CheckCircleIcon);\nconst InformationCircle = motion(InfoIcon);\n\nconst itemInitial: Target = { opacity: 0.5 };\nconst itemAnimate: Target = { opacity: 1 };\n\nconst itemIconInitial: Target = { color: 'rgb(113 113 122 / 50)' };\nconst itemSuccessIconAnimate: Target = { color: 'rgb(34 197 94)' };\nconst itemErrorIconAnimate: Target = { color: 'rgb(239 68 68)' };\n\nfunction MessageAnnotationHistoryItem ({ history, item: { state, time }, index }: { history: OngoingStateHistoryItem[], index: number, item: OngoingStateHistoryItem }) {\n  return (\n    <motion.li className=\"relative mb-2\" initial={itemInitial} animate={itemAnimate}>\n      {index > 1 && <span className=\"absolute left-2 bg-green-500/50 h-2\" style={{ width: 1, top: -8 }} />}\n      <div className=\"flex gap-2 items-center\">\n        <CheckedCircle className=\"size-4\" initial={itemIconInitial} animate={itemSuccessIconAnimate} />\n        <span>{state.display}</span>\n        {index > 0 && <DiffSeconds className=\"text-muted-foreground text-xs\" from={history[index - 1].time} to={time} />}\n      </div>\n      {state.message && <div className=\"ml-2 pl-4 text-muted-foreground text-xs border-l border-l-green-500/50 pt-1\">{state.message}</div>}\n    </motion.li>\n  );\n}\n\nfunction MessageAnnotationHistoryError ({ history, error }: { history: OngoingStateHistoryItem[], error: string }) {\n  return (\n    <motion.li className=\"relative mb-2\" initial={itemInitial} animate={itemAnimate}>\n      {history.length > 0 && <span className=\"absolute left-2 bg-muted-foreground h-2\" style={{ width: 1, top: -8 }} />}\n      <div className=\"flex gap-2 items-center\">\n        <InformationCircle className=\"size-4\" initial={itemIconInitial} animate={itemErrorIconAnimate} />\n        <span>{error}</span>\n      </div>\n    
</motion.li>\n  );\n}\n\nfunction MessageAnnotationCurrent ({ history, current }: { history: OngoingStateHistoryItem[], current: OngoingState }) {\n  return (\n    <motion.li\n      key={current.state}\n      className=\"relative space-y-1\"\n      initial={{\n        opacity: 0,\n        height: 0,\n        x: -40,\n      }}\n      animate={{\n        opacity: 0.5,\n        height: 'auto',\n        x: 0,\n      }}\n    >\n      <div className=\"flex gap-2 items-center\">\n        {(history?.length ?? 0) > 1 && <span className=\"absolute left-2 h-2 bg-zinc-500/50\" style={{ width: 1, top: -8 }} />}\n        <Loader2Icon className=\"size-4 animate-spin repeat-infinite text-muted-foreground\" />\n        <span>\n          {current.display}\n        </span>\n        {history && history.length > 0 && <DiffSeconds className=\"text-muted-foreground text-xs\" from={history[history.length - 1].time} />}\n      </div>\n      {current.message && <div className=\"ml-2 pl-4 text-muted-foreground text-xs border-l border-l-zinc-500 pt-1\">{current.message}</div>}\n    </motion.li>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-answer.tsx",
    "content": "import { useChatMessageField, useChatMessageStreamContainsState } from '@/components/chat/chat-hooks';\nimport type { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState } from '@/components/chat/chat-stream-state';\nimport { MessageBetaAlert } from '@/components/chat/message-beta-alert';\nimport { MessageContent } from '@/components/chat/message-content';\n\nexport function MessageAnswer ({ message, showBetaAlert }: { message: ChatMessageController | undefined, showBetaAlert?: boolean }) {\n  const content = useChatMessageField(message, 'content');\n  const shouldShow = useChatMessageStreamContainsState(message, AppChatStreamState.GENERATE_ANSWER);\n\n  if (!shouldShow && !content?.length) {\n    return null;\n  }\n\n  return (\n    <>\n      <div className=\"font-normal text-lg flex items-center gap-2\">\n        <svg className=\"dark:hidden size-4\" viewBox=\"0 0 745 745\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\">\n          <rect x=\"12\" y=\"12\" width=\"721\" height=\"721\" rx=\"108\" stroke=\"#212121\" strokeWidth=\"24\" />\n          <rect x=\"298\" y=\"172\" width=\"150\" height=\"150\" rx=\"24\" fill=\"#212121\" />\n          <rect x=\"298\" y=\"422\" width=\"150\" height=\"150\" rx=\"24\" fill=\"#212121\" />\n        </svg>\n        <svg className=\"hidden dark:block size-4\" viewBox=\"0 0 745 745\" fill=\"none\" xmlns=\"http://www.w3.org/2000/svg\">\n          <rect x=\"12\" y=\"12\" width=\"721\" height=\"721\" rx=\"108\" stroke=\"white\" strokeWidth=\"24\" />\n          <rect x=\"298\" y=\"172\" width=\"150\" height=\"150\" rx=\"24\" fill=\"white\" />\n          <rect x=\"298\" y=\"422\" width=\"150\" height=\"150\" rx=\"24\" fill=\"white\" />\n        </svg>\n        Answer\n      </div>\n      {showBetaAlert && <MessageBetaAlert />}\n      <MessageContent message={message} />\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/message-auto-scroll.tsx",
    "content": "'use client';\n\nimport type { ChatMessage } from '@/api/chats';\nimport { useRequestScroll } from '@/components/auto-scroll';\nimport type { ChatMessageController, OngoingState } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState, type StackVMState } from '@/components/chat/chat-stream-state';\nimport { useEffect } from 'react';\n\nexport function MessageAutoScroll ({ message }: { message: ChatMessageController }) {\n  const requestScroll = useRequestScroll();\n\n  useEffect(() => {\n    let handler1: any;\n    let handler2: any;\n\n    message.on('stream-update', handler1 = (_: ChatMessage, ongoing: OngoingState<AppChatStreamState | StackVMState>) => {\n      if (ongoing.state === AppChatStreamState.GENERATE_ANSWER) {\n        requestScroll('bottom');\n      }\n    });\n\n    message.once('stream-finished', handler2 = () => {\n      message.off('stream-update', handler1);\n    });\n\n    return () => {\n      message.off('stream-update', handler1);\n      message.off('stream-finished', handler2);\n    };\n  }, [message]);\n\n  return null;\n}"
  },
  {
    "path": "frontend/app/src/components/chat/message-beta-alert.tsx",
    "content": "import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { FlaskConicalIcon } from 'lucide-react';\n\nexport function MessageBetaAlert () {\n  return (\n    <Alert variant=\"info\" className='my-2'>\n      <FlaskConicalIcon />\n      <AlertTitle>\n        This chatbot is in Beta.\n      </AlertTitle>\n      <AlertDescription>\n        All generated information should be verified prior to use.\n      </AlertDescription>\n    </Alert>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-content-sources.tsx",
    "content": "import type { ChatMessageSource } from '@/api/chats';\nimport { useChatMessageField, useChatMessageStreamContainsState, useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState } from '@/components/chat/chat-stream-state';\nimport { isNotFinished, parseHref, parseSource } from '@/components/chat/utils';\nimport { ScrollArea, ScrollBar } from '@/components/ui/scroll-area';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { cn } from '@/lib/utils';\nimport { motion } from 'framer-motion';\nimport { LinkIcon, TextSearchIcon } from 'lucide-react';\nimport { useMemo } from 'react';\n\nexport function MessageContextSources ({ message }: { message: ChatMessageController | undefined }) {\n  const sources = useChatMessageField(message, 'sources');\n  const ongoing = useChatMessageStreamState(message);\n\n  const shouldShow = useChatMessageStreamContainsState(message, AppChatStreamState.SEARCH_RELATED_DOCUMENTS);\n\n  if (!shouldShow) {\n    return null;\n  }\n\n  const uriSet = new Set<string>();\n  const reducedContext = sources?.filter(source => {\n    if (uriSet.has(source.source_uri)) {\n      return false;\n    }\n    uriSet.add(source.source_uri);\n    return true;\n  });\n\n  const animation = isNotFinished(ongoing);\n  const hasSources = !!sources?.length;\n  const empty = sources && sources.length === 0;\n\n  return (\n    <>\n      <div className={cn('font-normal text-lg flex items-center gap-2 transition-opacity opacity-100', !hasSources && 'opacity-50')}>\n        <TextSearchIcon size=\"1em\" />\n        Sources\n      </div>\n      {hasSources && <ScrollArea className=\"h-max w-full\">\n        <ul className=\"flex gap-2 py-4\">\n          {reducedContext?.map((source, index) => (\n            <MessageContextSource key={source.source_uri} context={source} animation={animation} index={index} />\n          ))}\n     
   </ul>\n        <ScrollBar orientation=\"horizontal\" />\n      </ScrollArea>}\n      {empty && ongoing?.state !== AppChatStreamState.SEARCH_RELATED_DOCUMENTS && <div className=\"text-muted-foreground\">Empty</div>}\n      {empty && ongoing?.state === AppChatStreamState.SEARCH_RELATED_DOCUMENTS && (\n        <ul className=\"flex gap-2 py-4\">\n          <Skeleton className=\"rounded\" style={{ width: 198, height: 52 }} />\n          <Skeleton className=\"rounded\" style={{ width: 198, height: 52 }} />\n          <Skeleton className=\"rounded\" style={{ width: 198, height: 52 }} />\n        </ul>\n      )}\n    </>\n  );\n}\n\nfunction MessageContextSource ({ index, animation, context }: { index: number, animation: boolean, context: ChatMessageSource }) {\n  const source = useMemo(() => {\n    return parseSource(context.source_uri);\n  }, [context.source_uri]);\n\n  return (\n    <motion.li\n      key={context.id}\n      className=\"bg-card hover:bg-accent transition-colors w-[200px] overflow-hidden rounded-lg border text-xs\"\n      transition={{ delay: index * 0.1 }}\n      initial={animation && { x: '-30%', opacity: 0 }}\n      animate={{ x: 0, opacity: 1 }}\n    >\n      <a className=\"flex flex-col justify-between space-y-1 p-2 max-w-full h-full\" {...parseHref(context)}>\n        <div className=\"font-normal line-clamp-3 opacity-90\">\n          {context.name}\n        </div>\n        <div className=\"opacity-70 mt-auto mb-0\">\n          <LinkIcon size=\"1em\" className=\"inline-flex mr-1\" />\n          {source}\n        </div>\n      </a>\n    </motion.li>\n  );\n}\n\nexport function MessageContextSourceCard ({ title, href }: { title?: string, href?: string }) {\n  const source = useMemo(() => {\n    return parseSource(href);\n  }, [href]);\n\n  const isHttp = /^https?:\\/\\//.test(href ?? '');\n\n  return (\n    <a className=\"flex flex-col justify-between space-y-1 p-2 max-w-full h-full\" href={isHttp ? 
href : 'javascript:void(0)'} target=\"_blank\">\n      <div className=\"font-normal line-clamp-3 opacity-90\">\n        {title}\n      </div>\n      <div className=\"opacity-70 mt-auto mb-0\">\n        <LinkIcon size=\"1em\" className=\"inline-flex mr-1\" />\n        {source}\n      </div>\n    </a>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-content.test.tsx",
    "content": "import { ChatMessageRole } from '@/api/chats';\n\nimport { LegacyChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState } from '@/components/chat/chat-stream-state';\nimport { MessageContent } from '@/components/chat/message-content';\nimport { act, render, screen } from '@testing-library/react';\n\ntest('should render incremental markdown text', async () => {\n  const controller = new LegacyChatMessageController({\n    sources: [],\n    content: '## Hello',\n    id: 0,\n    role: ChatMessageRole.assistant,\n    created_at: new Date(),\n    updated_at: new Date(),\n    ordinal: 0,\n    chat_id: '0',\n    error: null,\n    finished_at: null,\n    trace_url: null,\n    user_id: null,\n    post_verification_result_url: null,\n  }, {\n    state: AppChatStreamState.GENERATE_ANSWER,\n    finished: false,\n    display: '',\n  });\n\n  render(<MessageContent message={controller} />);\n\n  expect(await screen.findByText('Hello')).not.toBeNull();\n\n  act(() => {\n    controller.applyDelta(' world!');\n  });\n\n  expect(await screen.findByText('Hello world!')).not.toBeNull();\n});\n\ntest('should render static markdown text', async () => {\n  const controller = new LegacyChatMessageController({\n    sources: [],\n    content: '## Hello world!',\n    id: 0,\n    role: ChatMessageRole.assistant,\n    created_at: new Date(),\n    updated_at: new Date(),\n    ordinal: 0,\n    chat_id: '0',\n    error: null,\n    finished_at: null,\n    trace_url: null,\n    user_id: null,\n    post_verification_result_url: null,\n  }, {\n    state: AppChatStreamState.GENERATE_ANSWER,\n    finished: false,\n    display: '',\n  });\n\n  render(<MessageContent message={controller} />);\n\n  expect(await screen.findByText('Hello world!')).not.toBeNull();\n});\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-content.tsx",
    "content": "import { useChatMessageField } from '@/components/chat/chat-hooks';\nimport { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { RemarkContent } from '@/components/remark-content';\n\nexport function MessageContent ({ message }: { message: ChatMessageController | undefined }) {\n  const content = useChatMessageField(message, 'content') ?? '';\n  return (\n    <RemarkContent>\n      {content}\n    </RemarkContent>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-error.tsx",
    "content": "import { useChatMessageField, useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { AppChatStreamState } from '@/components/chat/chat-stream-state';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { format } from 'date-fns';\n\nexport function MessageError ({ message }: { message: ChatMessageController }) {\n  const messageError = useChatMessageField(message, 'error');\n  const ongoing = useChatMessageStreamState(message);\n\n  let variant: 'destructive' | 'warning' = 'destructive';\n  let errorTitle = 'Failed to generate response';\n  let error: string | undefined;\n\n  if (messageError) {\n    error = messageError;\n  } else if (ongoing?.state === AppChatStreamState.UNKNOWN) {\n    variant = 'warning';\n    errorTitle = 'Unable to access message content';\n    error = `This message is not finished yet or accidentally terminated. (created at ${format(message.message.created_at, 'yyyy-MM-dd HH:mm:ss')})`;\n  }\n\n  if (error) {\n    return (\n      <Alert variant={variant}>\n        <AlertTitle>{errorTitle}</AlertTitle>\n        <AlertDescription>{error}</AlertDescription>\n      </Alert>\n    );\n  } else {\n    return null;\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-feedback.tsx",
    "content": "import type { FeedbackParams } from '@/api/chats';\nimport { usePortalContainer } from '@/components/portal-provider';\nimport { Button } from '@/components/ui/button';\nimport { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog';\nimport { Textarea } from '@/components/ui/textarea';\nimport { ToggleGroup, ToggleGroupItem } from '@/components/ui/toggle-group';\nimport { Loader2Icon, ThumbsDownIcon, ThumbsUpIcon } from 'lucide-react';\nimport { type ReactNode, useEffect, useState } from 'react';\n\nexport function MessageFeedback ({ initial, onFeedback, defaultAction, children }: { initial?: FeedbackParams, defaultAction?: 'like' | 'dislike', onFeedback: (action: 'like' | 'dislike', comment: string) => Promise<void>, children: ReactNode }) {\n  const [open, setOpen] = useState(false);\n  const [action, setAction] = useState<'like' | 'dislike'>(initial?.feedback_type ?? defaultAction ?? 'like');\n  const [comment, setComment] = useState(initial?.comment ?? 
'');\n  const [running, setRunning] = useState(false);\n  const [deleting, setDeleting] = useState(false);\n\n  useEffect(() => {\n    if (defaultAction && !initial) {\n      setAction(defaultAction);\n    }\n  }, [defaultAction, initial]);\n\n  useEffect(() => {\n    if (initial) {\n      setAction(initial.feedback_type);\n      setComment(initial.comment);\n    }\n  }, [initial]);\n\n  const disabled = running || deleting || !!initial;\n\n  const container = usePortalContainer();\n\n  return (\n    <Dialog open={open} onOpenChange={setOpen}>\n      {children}\n      <DialogContent container={container} className=\"space-y-4\">\n        <DialogHeader>\n          <DialogTitle>\n            Feedback\n          </DialogTitle>\n        </DialogHeader>\n        <section className=\"space-y-2\">\n          <h6 className=\"text-sm font-bold\">Do you like this answer</h6>\n          <ToggleGroup disabled={disabled} className=\"w-max\" type=\"single\" value={action} onValueChange={value => setAction(value as any)}>\n            <ToggleGroupItem value=\"like\" className=\"data-[state=on]:text-success data-[state=on]:bg-success/10\">\n              <ThumbsUpIcon className=\"w-4 h-4 mr-2\" />\n              Like\n            </ToggleGroupItem>\n            <ToggleGroupItem value=\"dislike\" className=\"data-[state=on]:text-destructive data-[state=on]:bg-destructive/10\">\n              <ThumbsDownIcon className=\"w-4 h-4 mr-2\" />\n              Dislike\n            </ToggleGroupItem>\n          </ToggleGroup>\n        </section>\n        <section>\n          <Textarea\n            placeholder=\"Comments...\"\n            value={comment}\n            onChange={e => setComment(e.target.value)}\n            disabled={disabled}\n          />\n        </section>\n        <div className=\"flex w-full justify-end items-center gap-2\">\n          <Button\n            className=\"gap-2\"\n            disabled={disabled}\n            onClick={() => {\n              setRunning(true);\n 
             onFeedback(action, comment)\n                .then(() => setOpen(false))\n                .finally(() => {\n                  setRunning(false);\n                });\n            }}>\n            {running && <Loader2Icon className=\"w-4 h-4 animate-spin repeat-infinite\" />}\n            Add feedback\n          </Button>\n        </div>\n      </DialogContent>\n    </Dialog>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-input.tsx",
    "content": "'use client';\n\nimport { listChatEngines } from '@/api/chat-engines';\nimport { useAuth } from '@/components/auth/AuthProvider';\nimport { useAllChatEngines } from '@/components/chat-engine/hooks';\nimport { Badge } from '@/components/ui/badge';\nimport { Button } from '@/components/ui/button';\nimport { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';\nimport { cn } from '@/lib/utils';\nimport isHotkey from 'is-hotkey';\nimport { ArrowUpIcon } from 'lucide-react';\nimport { type ChangeEvent, type Ref, useCallback, useRef } from 'react';\nimport TextareaAutosize, { type TextareaAutosizeProps } from 'react-textarea-autosize';\nimport useSWR from 'swr';\n\nexport interface MessageInputProps {\n  className?: string,\n  disabled?: boolean,\n  actionDisabled?: boolean,\n  inputRef?: Ref<HTMLTextAreaElement>,\n  inputProps?: TextareaAutosizeProps,\n  engine?: string,\n  onEngineChange?: (name: string) => void,\n}\n\nexport function MessageInput ({\n  className,\n  disabled,\n  actionDisabled,\n  inputRef,\n  inputProps,\n  engine,\n  onEngineChange,\n}: MessageInputProps) {\n  const auth = useAuth();\n  const buttonRef = useRef<HTMLButtonElement>(null);\n\n  const onChangeRef = useRef(inputProps?.onChange);\n  onChangeRef.current = inputProps?.onChange;\n  const handleChange = useCallback((ev: ChangeEvent<HTMLTextAreaElement>) => {\n    onChangeRef.current?.(ev);\n  }, []);\n\n  const { data, isLoading } = useAllChatEngines(!auth.me?.is_superuser);\n  const showShowSelectChatEngine = !!data?.length && !!onEngineChange;\n\n  return (\n    <div className={cn('bg-background border p-2 rounded-lg', className)}>\n      <TextareaAutosize\n        placeholder=\"Input your question here...\"\n        onKeyDown={e => {\n          if (!e.nativeEvent.isComposing && isHotkey('mod+Enter', e) && !actionDisabled) {\n            e.preventDefault();\n            buttonRef.current?.click();\n          }\n        }}\n        
{...inputProps}\n        onChange={handleChange}\n        ref={inputRef}\n        className=\"w-full border-none ring-0 outline-none bg-background focus-visible:ring-0 focus-visible:ring-offset-0 resize-none placeholder:font-light placeholder-gray-400 dark:placeholder-gray-500 max-h-72\"\n        disabled={disabled || inputProps?.disabled}\n        minRows={4}\n      />\n      <div className=\"flex items-center justify-end gap-2\">\n        {showShowSelectChatEngine && <Select value={engine ?? ''} onValueChange={value => onEngineChange?.(value)}>\n          <SelectTrigger className=\"w-max border-none h-max\" disabled={isLoading}>\n            <SelectValue placeholder=\"Select Chat Engine\" />\n          </SelectTrigger>\n          <SelectContent>\n            {data?.map(item => (\n              <SelectItem key={item.name} value={String(item.name)} textValue={item.name}>\n              <span className=\"flex items-center gap-2\">\n                {item.is_default ? <Badge variant=\"outline\" className=\"text-success border-success/70\">default</Badge> : item.name}\n                {!!item.engine_options.external_engine_config?.stream_chat_api_url\n                  ? <Badge>External Engine (StackVM)</Badge>\n                  : item.engine_options.knowledge_graph?.enabled !== false /* TODO: require default config */\n                    ? <Badge variant=\"secondary\">Knowledge graph enabled</Badge>\n                    : undefined}\n              </span>\n              </SelectItem>\n            ))}\n          </SelectContent>\n        </Select>}\n        <Button size=\"icon\" className=\"rounded-full flex-shrink-0 w-8 h-8 p-2\" disabled={actionDisabled || disabled} ref={buttonRef}>\n          <ArrowUpIcon className=\"w-full h-full\" />\n        </Button>\n      </div>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/message-operations.tsx",
    "content": "import { useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { MessageFeedback } from '@/components/chat/message-feedback';\nimport { useMessageFeedback } from '@/components/chat/use-message-feedback';\nimport { usePortalContainer } from '@/components/portal-provider';\nimport { Button } from '@/components/ui/button';\nimport { DialogTrigger } from '@/components/ui/dialog';\nimport { Tooltip, TooltipContent, TooltipPortal, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { cn } from '@/lib/utils';\nimport copy from 'copy-to-clipboard';\nimport { CopyCheckIcon, CopyIcon, ThumbsDownIcon, ThumbsUpIcon } from 'lucide-react';\nimport { useState } from 'react';\n\nexport function MessageOperations ({ message }: { message: ChatMessageController }) {\n  const streamState = useChatMessageStreamState(message);\n  const { feedbackData, feedback: callFeedback, disabled } = useMessageFeedback(message.id);\n  const [copied, setCopied] = useState(false);\n  const [clicked, setClicked] = useState<'like' | 'dislike'>('like');\n  const container = usePortalContainer();\n\n  if (streamState) {\n    return null;\n  }\n  return (\n    <TooltipProvider>\n      <div className=\"flex items-center gap-2\">\n        {/*<Button*/}\n        {/*  size=\"sm\"*/}\n        {/*  className=\"gap-1 text-xs px-2 py-1 h-max\"*/}\n        {/*  variant=\"ghost\"*/}\n        {/*  onClick={() => controller.regenerate(message.id)}*/}\n        {/*  disabled*/}\n        {/*>*/}\n        {/*  <RefreshCwIcon size=\"1em\" />*/}\n        {/*  Regenerate*/}\n        {/*</Button>*/}\n\n        <MessageFeedback\n          initial={feedbackData}\n          defaultAction={clicked}\n          onFeedback={async (action, comment) => callFeedback(action, comment)}\n        >\n          {feedbackData\n            ? 
(<DialogTrigger asChild>\n              <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full w-7 h-7\" disabled={disabled}>\n                {feedbackData.feedback_type === 'like' ? <ThumbsUpIcon className=\"w-4 h-4 text-success\" /> : <ThumbsDownIcon className=\"w-4 h-4 text-destructive\" />}\n              </Button>\n            </DialogTrigger>)\n            : (<>\n              <Tooltip>\n                <DialogTrigger asChild>\n                  <TooltipTrigger asChild>\n                    <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full w-7 h-7\" disabled={disabled} onClick={() => { setClicked('like'); }} aria-label=\"Like This Answer\">\n                      <ThumbsUpIcon className=\"w-4 h-4\" />\n                    </Button>\n                  </TooltipTrigger>\n                </DialogTrigger>\n                <TooltipPortal container={container}>\n                  <TooltipContent>\n                    I like this answer :)\n                  </TooltipContent>\n                </TooltipPortal>\n              </Tooltip>\n              <Tooltip>\n                <DialogTrigger asChild>\n                  <TooltipTrigger asChild>\n                    <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full w-7 h-7\" disabled={disabled} onClick={() => { setClicked('dislike'); }} aria-label=\"Dislike This Answer\">\n                      <ThumbsDownIcon className=\"w-4 h-4\" />\n                    </Button>\n                  </TooltipTrigger>\n                </DialogTrigger>\n                <TooltipPortal container={container}>\n                  <TooltipContent>\n                    I dislike this answer :(\n                  </TooltipContent>\n                </TooltipPortal>\n              </Tooltip>\n            </>)}\n        </MessageFeedback>\n\n        <Tooltip>\n          <TooltipTrigger asChild>\n            <Button\n              size=\"icon\"\n              variant=\"ghost\"\n              
className={cn('rounded-full w-7 h-7 transition-colors', copied && 'text-success hover:text-success hover:bg-success/10')}\n              onClick={() => {\n                setCopied(copy(message.content));\n              }}\n            >\n              {copied\n                ? <CopyCheckIcon className=\"w-4 h-4\" />\n                : <CopyIcon className=\"w-4 h-4\" />}\n            </Button>\n          </TooltipTrigger>\n          <TooltipPortal container={container}>\n            <TooltipContent>\n              {copied ? 'Copied!' : 'Copy'}\n            </TooltipContent>\n          </TooltipPortal>\n        </Tooltip>\n      </div>\n    </TooltipProvider>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/message-recommend-questions.tsx",
    "content": "import { getChatMessageRecommendedQuestions, reloadChatMessageRecommendedQuestions } from '@/api/chats';\nimport { useChatMessageField, useChatMessageStreamState, useCurrentChatController } from '@/components/chat/chat-hooks';\nimport type { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { isNotFinished } from '@/components/chat/utils';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { getErrorMessage } from '@/lib/errors';\nimport { cn } from '@/lib/utils';\nimport { PlusIcon, RefreshCwIcon } from 'lucide-react';\nimport { useState } from 'react';\nimport useSWR from 'swr';\n\nexport function MessageRecommendQuestions ({ assistant }: { assistant: ChatMessageController | undefined }) {\n  const controller = useCurrentChatController();\n\n  const state = useChatMessageStreamState(assistant);\n  const finishedAt = useChatMessageField(assistant, 'finished_at');\n  const shouldPerformRequest = !isNotFinished(state) && !!finishedAt;\n  const { data, mutate, isLoading } = useSWR(assistant && shouldPerformRequest && `chat.messages.${assistant.id}.recommend-questions`, () => getChatMessageRecommendedQuestions(assistant!.id), { revalidateOnFocus: false });\n\n  const [reloading, setReloading] = useState(false);\n  const [reloadError, setReloadError] = useState<unknown>();\n\n  const reload = () => {\n    if (!assistant || !shouldPerformRequest) {\n      return;\n    }\n    setReloading(true);\n    reloadChatMessageRecommendedQuestions(assistant.id)\n      .then(res => mutate(res, { revalidate: false }), error => setReloadError(error))\n      .finally(() => {\n        setReloading(false);\n      });\n  };\n\n  if (!shouldPerformRequest) {\n    return null;\n  }\n\n  return (\n    <>\n      <hr />\n      <section className=\"space-y-2\">\n        <div className={cn('font-normal text-lg flex items-center gap-2 transition-opacity 
opacity-100')}>\n          <div>\n            Further questions\n          </div>\n          <button\n            className={cn('ml-2 transition-colors text-primary/70 hover:text-primary', (isLoading || reloading) && 'text-muted-foreground')}\n            disabled={isLoading || reloading}\n            onClick={() => {\n              reload();\n            }}\n          >\n            <RefreshCwIcon className={cn('size-4 repeat-infinite', (isLoading || reloading) && 'animate-spin')} />\n          </button>\n        </div>\n        {!!reloadError && (\n          <Alert>\n            <AlertTitle>Failed to reload recommended questions</AlertTitle>\n            <AlertDescription>{getErrorMessage(reloadError)}</AlertDescription>\n          </Alert>\n        )}\n        <ul className=\"\">\n          {isLoading && ['w-[70%]', 'w-[30%]', 'w-[50%]'].map(i => (\n            <li key={i} className=\"last-of-type:border-b-0 border-b py-2 text-sm cursor-pointer transition-colors text-muted-foreground\">\n              <Skeleton className={cn('bg-muted h-3 py-0.5 w-60', i)} />\n            </li>\n          ))}\n          {data?.map((q, i) => (\n            <li key={i} className=\"text-sm last-of-type:border-b-0 border-b py-2\">\n              <button className=\"relative w-full text-left pr-8 cursor-pointer transition-colors text-muted-foreground hover:text-foreground\" onClick={() => {\n                void controller.post({\n                  content: q,\n                });\n              }}>\n                {q}\n                <PlusIcon className=\"absolute stroke-2 size-4 right-2 top-1/2 -translate-y-1/2\" />\n              </button>\n            </li>\n          ))}\n        </ul>\n      </section>\n    </>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/message-section.tsx",
    "content": "import { useChatMessageStreamState } from '@/components/chat/chat-hooks';\nimport type { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { isNotFinished } from '@/components/chat/utils';\nimport { cn } from '@/lib/utils';\nimport { motion } from 'framer-motion';\nimport type { ReactNode } from 'react';\n\nexport function MessageSection ({ className, message, children }: { className?: string, message: ChatMessageController | undefined, children: ReactNode }) {\n  const state = useChatMessageStreamState(message);\n  const animation = isNotFinished(state);\n\n  return (\n    <motion.section\n      className={cn('space-y-0', className)}\n      initial={animation && { y: '-30%', opacity: 0 }}\n      animate={{ y: 0, opacity: 1 }}\n    >\n      {children}\n    </motion.section>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/chat/testutils.ts",
    "content": "import { type ChatMessage, ChatMessageRole } from '@/api/chats';\n\nexport function createExampleInitialChatMessage (): ChatMessage {\n  return {\n    id: 1,\n    post_verification_result_url: null,\n    chat_id: '0000',\n    ordinal: 1,\n    content: '',\n    created_at: new Date(),\n    updated_at: new Date(),\n    finished_at: null,\n    error: null,\n    role: ChatMessageRole.assistant,\n    sources: [],\n    trace_url: '',\n    user_id: 'example_user',\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/use-ask.ts",
    "content": "import { useChats } from '@/components/chat/chat-hooks';\nimport { useGtagFn } from '@/components/gtag-provider';\nimport { getErrorMessage } from '@/lib/errors';\nimport { toastError } from '@/lib/ui-error';\nimport { useRouter } from 'next/navigation';\nimport { useCallback, useEffect, useRef, useState, useTransition } from 'react';\n\nexport function useAsk (onFinish?: () => void) {\n  const gtagFn = useGtagFn();\n  const { newChat, disabled } = useChats();\n  const router = useRouter();\n  const [waiting, setWaiting] = useState(false);\n  const [transition, startTransition] = useTransition();\n  const [engine, setEngine] = useState<string>();\n  const engineRef = useRef<string>(undefined);\n\n  const ask = useCallback((message: string, options?: {\n    headers?: Record<string, string>;\n  }) => {\n    setWaiting(true);\n    const handleInitialError = (error: unknown) => {\n      setWaiting(false);\n      toastError('Failed to chat', getErrorMessage(error));\n    };\n\n    const controller = newChat(undefined, undefined, { content: message, chat_engine: engineRef.current, headers: options?.headers }, null, gtagFn);\n\n    controller.once('created', chat => {\n      controller.off('post-error', handleInitialError);\n\n      setWaiting(false);\n      startTransition(() => {\n        router.push(`/c/${chat.id}`);\n      });\n    })\n      .once('post-error', handleInitialError);\n  }, []);\n\n  useEffect(() => {\n    if (!waiting && !transition) {\n      onFinish?.();\n    }\n  }, [waiting, transition]);\n\n  return {\n    ask,\n    engine,\n    disabled,\n    setEngine: (engine: string | undefined) => {\n      engineRef.current = engine;\n      setEngine(engine);\n    },\n    loading: waiting || transition,\n  };\n}\n\nexport type UseAskReturns = ReturnType<typeof useAsk>;"
  },
  {
    "path": "frontend/app/src/components/chat/use-message-feedback.ts",
    "content": "'use client';\n\nimport { type FeedbackParams, postFeedback } from '@/api/chats';\nimport { useState } from 'react';\n\nexport interface UseMessageFeedbackReturns {\n  feedbackData: FeedbackParams | undefined;\n  disabled: boolean;\n\n  feedback (action: 'like' | 'dislike', comment: string): Promise<void>;\n}\n\nexport function useMessageFeedback (messageId: number | undefined, enabled: boolean = true): UseMessageFeedbackReturns {\n  const [feedback, setFeedback] = useState<FeedbackParams>();\n  const isLoading = false;\n  const isValidating = false;\n  const [acting, setActing] = useState(false);\n  const disabled = messageId == null && isValidating || isLoading || acting || !enabled;\n\n  return {\n    feedbackData: feedback,\n    disabled,\n    feedback: async (action, /* detail, */ comment) => {\n      if (!messageId) {\n        return;\n      }\n      setActing(true);\n      await postFeedback(messageId, { feedback_type: action, comment }).finally(() => setActing(false));\n      setFeedback({ feedback_type: action, comment });\n    },\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat/utils.ts",
    "content": "import type { ChatMessageSource } from '@/api/chats';\nimport type { OngoingState } from '@/components/chat/chat-message-controller';\n\nexport type { ChatEngineOptions } from '@/api/chat-engines';\n\nconst truncateUrl = (url: string, maxLength: number = 20): string => {\n  if (!url || url.length <= maxLength) return url;\n  const start = url.substring(0, maxLength / 2);\n  const end = url.substring(url.length - maxLength / 2);\n  return `${start}...${end}`;\n};\n\nexport function parseSource (uri?: string) {\n  if (!uri) {\n    return 'Unknown';\n  }\n  if (/^https:\\/\\//.test(uri)) {\n    return new URL(uri).hostname;\n  } else {\n    return truncateUrl(uri);\n  }\n}\n\nexport function parseHref (source: ChatMessageSource): { href: string, download?: string, target?: HTMLAnchorElement['target'] } {\n  if (/^https?:\\/\\//.test(source.source_uri)) {\n    return { href: source.source_uri, target: '_blank' };\n  } else if (source.source_uri.startsWith('uploads/')) {\n    return { href: `/api/v1/documents/${source.id}/download`, download: source.source_uri.slice(source.source_uri.lastIndexOf('/') + 1) };\n  } else {\n    return { href: 'javascript:void(0)' };\n  }\n}\n\nexport function isNotFinished (ongoing: OngoingState<any> | undefined) {\n  return !!ongoing && !ongoing.finished;\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat-engine/chat-engines-table.tsx",
    "content": "'use client';\n\nimport { type ChatEngine, createChatEngine, deleteChatEngine, listChatEngines } from '@/api/chat-engines';\nimport { actions } from '@/components/cells/actions';\nimport { boolean } from '@/components/cells/boolean';\nimport { datetime } from '@/components/cells/datetime';\nimport { mono } from '@/components/cells/mono';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { useBootstrapStatus } from '@/components/system/BootstrapStatusProvider';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { AlertTriangleIcon, CopyIcon, TrashIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { toast } from 'sonner';\n\nconst helper = createColumnHelper<ChatEngine>();\n\nconst columns = [\n  helper.accessor('id', { \n    header: 'ID',\n    cell: mono \n  }),\n  helper.accessor('name', { \n    header: 'NAME',\n    cell: context => <NameLink chatEngine={context.row.original} /> \n  }),\n  helper.accessor('created_at', { \n    header: 'CREATED AT',\n    cell: datetime \n  }),\n  helper.accessor('updated_at', { \n    header: 'UPDATED AT',\n    cell: datetime \n  }),\n  helper.accessor('is_default', { \n    header: 'IS DEFAULT',\n    cell: boolean \n  }),\n  helper.accessor('is_public', { \n    header: 'IS PUBLIC',\n    cell: boolean \n  }),\n  helper.display({\n    header: 'ACTIONS',\n    cell: actions((chatEngine) => [\n      {\n        key: 'clone',\n        action: async ({ startTransition, router }) => {\n          const { name, llm_id, fast_llm_id, engine_options } = chatEngine;\n          createChatEngine({\n            name: `${name} Copy`, llm_id, fast_llm_id, engine_options,\n          })\n            .then(newEngine => {\n              toast.success('Chat Engine successfully cloned.');\n              startTransition(() => {\n    
            router.push(`/chat-engines/${newEngine.id}`);\n              });\n            });\n        },\n        icon: <CopyIcon className=\"size-3\" />,\n        title: 'Clone',\n      },\n      {\n        key: 'delete',\n        action: async ({ table, setDropdownOpen }) => {\n          await deleteChatEngine(chatEngine.id);\n          table.reload?.();\n          setDropdownOpen(false);\n        },\n        title: 'Delete',\n        icon: <TrashIcon className=\"size-3\" />,\n        dangerous: {},\n      },\n    ]),\n  }),\n] as ColumnDef<ChatEngine>[];\n\nexport function ChatEnginesTable () {\n  return (\n    <DataTableRemote\n      columns={columns}\n      apiKey=\"api.chat-engines.list\"\n      api={listChatEngines}\n      idColumn=\"id\"\n    />\n  );\n}\n\nfunction NameLink ({ chatEngine }: { chatEngine: ChatEngine }) {\n  const { need_migration } = useBootstrapStatus();\n\n  const kbNotConfigured = !!need_migration.chat_engines_without_kb_configured?.includes(chatEngine.id);\n\n  return (\n    <Link\n      className=\"underline font-mono\"\n      href={`/chat-engines/${chatEngine.id}`}\n    >\n      {kbNotConfigured && <TooltipProvider>\n        <Tooltip>\n          <TooltipTrigger asChild>\n            <AlertTriangleIcon className=\"text-warning inline-flex mr-1 size-3\" />\n          </TooltipTrigger>\n          <TooltipContent className=\"text-xs\" align=\"start\">\n            Knowledge Base not configured.\n          </TooltipContent>\n        </Tooltip>\n      </TooltipProvider>}\n      {chatEngine.name}\n    </Link>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat-engine/create-chat-engine-form.tsx",
    "content": "'use client';\n\nimport { type ChatEngineOptions, createChatEngine } from '@/api/chat-engines';\nimport { KBListSelectForObjectValue } from '@/components/chat-engine/kb-list-select';\nimport { FormSection, FormSectionsProvider, useFormSectionFields } from '@/components/form-sections';\nimport { LLMSelect, RerankerSelect } from '@/components/form/biz';\nimport { FormCheckbox, FormInput, FormSwitch } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { PromptInput } from '@/components/form/widgets/PromptInput';\nimport { SecondaryNavigatorItem, SecondaryNavigatorLayout, SecondaryNavigatorList, SecondaryNavigatorMain } from '@/components/secondary-navigator-list';\nimport { Button } from '@/components/ui/button';\nimport { Form, formDomEventHandlers, useFormContext } from '@/components/ui/form.beta';\nimport { Separator } from '@/components/ui/separator';\nimport { cn } from '@/lib/utils';\nimport { useForm } from '@tanstack/react-form';\nimport { capitalCase } from 'change-case-all';\nimport { useRouter } from 'next/navigation';\nimport { type ReactNode, useEffect, useId, useState, useTransition } from 'react';\nimport { toast } from 'sonner';\nimport { z } from 'zod';\n\nconst schema = z.object({\n  name: z.string().min(1),\n  is_public: z.boolean().optional(),\n  llm_id: z.number().optional(),\n  fast_llm_id: z.number().optional(),\n  reranker_id: z.number().optional(),\n  engine_options: z.object({\n    knowledge_base: z.object({\n      linked_knowledge_bases: z.object({\n        id: z.number(),\n      }).array().min(1),\n    }),\n    knowledge_graph: z.object({\n      depth: z.number().min(1).nullable().optional(),\n    }).passthrough().optional(),\n    llm: z.object({}).passthrough().optional(),\n  }).passthrough(),\n});\n\nconst field = formFieldLayout<typeof 
schema>();\n\nconst nameSchema = z.string().min(1);\nconst kbSchema = z.object({ id: z.number() }).array().min(1);\nconst kgGraphDepthSchema = z.number().min(1).optional();\n\nexport function CreateChatEngineForm ({ defaultChatEngineOptions }: { defaultChatEngineOptions: ChatEngineOptions }) {\n  const [transitioning, startTransition] = useTransition();\n  const [submissionError, setSubmissionError] = useState<unknown>(undefined);\n  const router = useRouter();\n  const id = useId();\n\n  const form = useForm({\n    onSubmit: onSubmitHelper(schema, async data => {\n      if (data.is_public == null) {\n        data.is_public = true;\n      }\n      const ce = await createChatEngine(data);\n      startTransition(() => {\n        router.push(`/chat-engines/${ce.id}`);\n        router.refresh();\n      });\n    }, setSubmissionError),\n    onSubmitInvalid () {\n      toast.error('Validation failed', { description: 'Please check your chat engine configurations.' });\n    },\n  });\n\n  return (\n    <Form form={form} disabled={transitioning} submissionError={submissionError}>\n      <FormSectionsProvider>\n        <form id={id} {...formDomEventHandlers(form, transitioning)}>\n          <SecondaryNavigatorLayout defaultValue=\"General\">\n            <SecondaryNavigatorList>\n              <SectionTabTrigger required value=\"General\" />\n              <SectionTabTrigger required value=\"Retrieval\" />\n              <SectionTabTrigger value=\"Generation\" />\n              <SectionTabTrigger value=\"Experimental\" />\n              <Separator />\n              <FormRootError />\n              <Button className=\"w-full\" type=\"submit\" form={id} disabled={form.state.isSubmitting || transitioning}>\n                Create Chat Engine\n              </Button>\n            </SecondaryNavigatorList>\n\n            <Section title=\"General\">\n              <field.Basic required name=\"name\" label=\"Name\" defaultValue=\"\" validators={{ onSubmit: nameSchema, onBlur: 
nameSchema }}>\n                <FormInput placeholder=\"Enter chat engine name\" />\n              </field.Basic>\n              <field.Contained name='is_public' label=\"Is Public\" defaultValue={true}>\n                <FormSwitch />\n              </field.Contained>\n              <SubSection title=\"Models\">\n                <field.Basic name=\"llm_id\" label=\"LLM\">\n                  <LLMSelect />\n                </field.Basic>\n                <field.Basic name=\"fast_llm_id\" label=\"Fast LLM\">\n                  <LLMSelect />\n                </field.Basic>\n              </SubSection>\n            </Section>\n\n            <Section title=\"Retrieval\">\n              <SubSection title=\"Knowledge Sources\">\n                <field.Basic\n                  required\n                  name=\"engine_options.knowledge_base.linked_knowledge_bases\"\n                  label=\"Knowledge Bases\"\n                  validators={{ onChange: kbSchema, onSubmit: kbSchema }}\n                >\n                  <KBListSelectForObjectValue />\n                </field.Basic>\n                <field.Inline\n                  name=\"engine_options.hide_sources\"\n                  label=\"Hide Sources\"\n                  description=\"Hide knowledge sources in chat responses\"\n                  defaultValue={defaultChatEngineOptions.hide_sources}\n                >\n                  <FormCheckbox />\n                </field.Inline>\n              </SubSection>\n              <SubSection title=\"Semantic Search\">\n                <field.Basic name=\"reranker_id\" label=\"Reranker\">\n                  <RerankerSelect />\n                </field.Basic>\n              </SubSection>\n              <SubSection title=\"Knowledge Graph\">\n                <field.Contained\n                  name=\"engine_options.knowledge_graph.enabled\"\n                  label=\"Enable Knowledge Graph\"\n                  description=\"Enable knowledge graph to enrich context 
information\"\n                  defaultValue={defaultChatEngineOptions.knowledge_graph?.enabled}\n                >\n                  <FormSwitch />\n                </field.Contained>\n                <field.Basic name=\"engine_options.knowledge_graph.depth\" label=\"Depth\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.depth} validators={{ onBlur: kgGraphDepthSchema, onSubmit: kgGraphDepthSchema }}>\n                  <FormInput type=\"number\" min={1} step={1} />\n                </field.Basic>\n                <field.Inline name=\"engine_options.knowledge_graph.include_meta\" label=\"Include Metadata\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.include_meta} description=\"Include metadata information in knowledge graph nodes to provide additional context\">\n                  <FormCheckbox />\n                </field.Inline>\n                <field.Inline name=\"engine_options.knowledge_graph.with_degree\" label=\"With Degree\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.with_degree} description=\"Include entity in-degree and out-degree information in knowledge graph for weight calculation and ranking\">\n                  <FormCheckbox />\n                </field.Inline>\n                <field.Inline name=\"engine_options.knowledge_graph.using_intent_search\" label=\"Using Intent Search\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.using_intent_search} description=\"Enable intelligent search that breaks down user question into sub-questions for more comprehensive search results\">\n                  <FormCheckbox />\n                </field.Inline>\n                {(['intent_graph_knowledge', 'normal_graph_knowledge'] as const).map(name => (\n                  <field.Basic key={name} name={`engine_options.llm.${name}`} label={capitalCase(name)} fallbackValue={defaultChatEngineOptions.llm?.[name]} description={llmPromptDescriptions[name]}>\n                    <PromptInput />\n                  
</field.Basic>\n                ))}\n              </SubSection>\n            </Section>\n\n            <Section title=\"Generation\">\n              <SubSection title=\"Clarify Question\">\n                <field.Contained\n                  unimportant\n                  name=\"engine_options.clarify_question\"\n                  label=\"Clarify Question\"\n                  description=\"Allow ChatBot to check if user input is ambiguous and ask clarifying questions\"\n                  defaultValue={defaultChatEngineOptions.clarify_question}\n                >\n                  <FormSwitch />\n                </field.Contained>\n                <field.Basic name=\"engine_options.llm.clarifying_question_prompt\" label=\"\" fallbackValue={defaultChatEngineOptions.llm?.clarifying_question_prompt} description={llmPromptDescriptions.clarifying_question_prompt}>\n                  <PromptInput />\n                </field.Basic>\n              </SubSection>\n              <SubSection title=\"Rewrite Question\">\n                <field.Basic name=\"engine_options.llm.condense_question_prompt\" label=\"\" fallbackValue={defaultChatEngineOptions.llm?.condense_question_prompt} description={llmPromptDescriptions.condense_question_prompt}>\n                  <PromptInput />\n                </field.Basic>\n              </SubSection>\n              <SubSection title=\"Answer Question\">\n                <field.Basic name=\"engine_options.llm.text_qa_prompt\" label=\"\" fallbackValue={defaultChatEngineOptions.llm?.text_qa_prompt} description={llmPromptDescriptions.text_qa_prompt}>\n                  <PromptInput />\n                </field.Basic>\n              </SubSection>\n              <SubSection title=\"Further Questions\">\n                <field.Contained\n                  unimportant\n                  name=\"engine_options.further_questions\"\n                  label=\"Show Further Questions\"\n                  description=\"Show suggested follow-up questions 
after each answer\"\n                  defaultValue={defaultChatEngineOptions.further_questions}\n                >\n                  <FormSwitch />\n                </field.Contained>\n                <field.Basic name=\"engine_options.llm.further_questions_prompt\" label=\"\" fallbackValue={defaultChatEngineOptions.llm?.further_questions_prompt} description={llmPromptDescriptions.further_questions_prompt}>\n                  <PromptInput />\n                </field.Basic>\n              </SubSection>\n            </Section>\n\n            <Section title=\"Experimental\">\n              <SubSection title=\"External Engine\">\n                <field.Basic name=\"engine_options.external_engine_config.stream_chat_api_url\" label=\"External Chat Engine API URL (StackVM)\" fallbackValue={defaultChatEngineOptions.external_engine_config?.stream_chat_api_url ?? ''}>\n                  <FormInput />\n                </field.Basic>\n                <field.Basic name=\"engine_options.llm.generate_goal_prompt\" label=\"Generate Goal Prompt\" fallbackValue={defaultChatEngineOptions.llm?.generate_goal_prompt} description={llmPromptDescriptions.generate_goal_prompt}>\n                  <PromptInput />\n                </field.Basic>\n              </SubSection>\n              <SubSection title=\"Post Verification\">\n                <field.Basic name=\"engine_options.post_verification_url\" label=\"Post Verifycation Service URL\" fallbackValue={defaultChatEngineOptions.post_verification_url ?? ''}>\n                  <FormInput />\n                </field.Basic>\n                <field.Basic name=\"engine_options.post_verification_token\" label=\"Post Verifycation Service Token\" fallbackValue={defaultChatEngineOptions.post_verification_token ?? 
''}>\n                  <FormInput />\n                </field.Basic>\n              </SubSection>\n            </Section>\n          </SecondaryNavigatorLayout>\n        </form>\n      </FormSectionsProvider>\n    </Form>\n  );\n}\n\nfunction SectionTabTrigger ({ value, required }: { value: string, required?: boolean }) {\n  const [invalid, setInvalid] = useState(false);\n  const { form } = useFormContext();\n  const fields = useFormSectionFields(value);\n\n  useEffect(() => {\n    return form.store.subscribe(() => {\n      let invalid = false;\n      for (let field of fields.values()) {\n        if (field.getMeta().errors.length > 0) {\n          invalid = true;\n          break;\n        }\n      }\n      setInvalid(invalid);\n    });\n  }, [form, fields, value]);\n\n  return (\n    <SecondaryNavigatorItem value={value}>\n      <span className={cn(invalid && 'text-destructive')}>\n        {value}\n      </span>\n      {required && <sup className=\"text-destructive\" aria-hidden>*</sup>}\n    </SecondaryNavigatorItem>\n  );\n}\n\nfunction Section ({ title, children }: { title: string, children: ReactNode }) {\n  return (\n    <FormSection value={title}>\n      <SecondaryNavigatorMain className=\"space-y-8 max-w-screen-sm px-2 pb-8\" value={title} strategy=\"hidden\">\n        {children}\n      </SecondaryNavigatorMain>\n    </FormSection>\n  );\n}\n\nfunction SubSection ({ title, children }: { title: ReactNode, children: ReactNode }) {\n  return (\n    <section className=\"space-y-4\">\n      <h4 className=\"text-lg\">{title}</h4>\n      {children}\n    </section>\n  );\n}\n\nconst llmPromptFields = [\n  'condense_question_prompt',\n  'text_qa_prompt',\n  'intent_graph_knowledge',\n  'normal_graph_knowledge',\n  'clarifying_question_prompt',\n  'generate_goal_prompt',\n  'further_questions_prompt',\n] as const;\n\nconst llmPromptDescriptions: { [P in typeof llmPromptFields[number]]: string } = {\n  'condense_question_prompt': 'Prompt template for condensing a 
conversation history and follow-up question into a standalone question',\n  'text_qa_prompt': 'Prompt template for generating answers based on provided context and question',\n  'intent_graph_knowledge': 'Prompt template for processing and extracting knowledge from graph-based traversal methods',\n  'normal_graph_knowledge': 'Prompt template for processing and extracting knowledge from graph-based traversal methods',\n  'clarifying_question_prompt': 'Prompt template for generating clarifying questions when the user\\'s input needs more context or specificity',\n  'generate_goal_prompt': 'Prompt template for generating conversation goals and objectives based on user input',\n  'further_questions_prompt': 'Prompt template for generating follow-up questions to continue the conversation',\n};"
  },
  {
    "path": "frontend/app/src/components/chat-engine/hooks.ts",
    "content": "import { listChatEngines, listPublicChatEngines } from '@/api/chat-engines';\nimport { listAllHelper } from '@/lib/request';\nimport useSWR from 'swr';\n\nexport function useAllChatEngines (onlyPublic: boolean = false) {\n  return useSWR(onlyPublic ? 'api.chat-engines.list-all-public' : 'api.chat-engines.list-all', () => listAllHelper(onlyPublic ? listPublicChatEngines : listChatEngines, 'id'));\n}"
  },
  {
    "path": "frontend/app/src/components/chat-engine/kb-list-select.tsx",
    "content": "import { type FormControlWidgetProps } from '@/components/form/control-widget';\nimport { useAllKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { Command, CommandEmpty, CommandGroup, CommandInput, CommandItem, CommandList } from '@/components/ui/command';\nimport { Popover, PopoverContent } from '@/components/ui/popover';\nimport { getErrorMessage } from '@/lib/errors';\nimport { cn } from '@/lib/utils';\nimport * as PopoverPrimitive from '@radix-ui/react-popover';\nimport { AlertTriangleIcon, CheckIcon, DotIcon } from 'lucide-react';\nimport * as React from 'react';\nimport { useState } from 'react';\n\nexport function KBListSelect ({ ref, disabled, value, onChange, ...props }: FormControlWidgetProps<number[]>) {\n  const [open, setOpen] = useState(false);\n  const { data: knowledgeBases, isLoading, error } = useAllKnowledgeBases();\n  const isConfigReady = !isLoading && !error;\n\n  const current = value?.map(id => knowledgeBases?.find(kb => kb.id === id));\n\n  return (\n    <Popover open={open} onOpenChange={setOpen}>\n      <div className={cn('flex items-center gap-2')}>\n        <PopoverPrimitive.Trigger\n          ref={ref}\n          disabled={disabled || !isConfigReady}\n          className={cn(\n            'flex flex-col min-h-10 w-full text-left items-stretch justify-start rounded-md border border-input bg-background px-3 py-1 text-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50',\n          )}\n          {...props}\n        >\n          {isLoading\n            ? <span>Loading options...</span>\n            : !!error\n              ? <span className=\"text-destructive\">{getErrorMessage(error)}</span>\n              : !!current?.length\n                ? current.map((option, index) => (\n                  option ? 
(\n                    <div key={option.id} className=\"w-full block border-t first-of-type:border-t-0 py-2\">\n                      <span>{option.name}</span>\n                      <div className=\"text-xs text-muted-foreground ml-2 inline-flex gap-1 items-center\">\n                      <span>\n                        {(option.documents_total ?? 0) || <><AlertTriangleIcon className=\"text-warning inline-flex size-3 mr-0.5\" /> no</>} documents\n                      </span>\n                        <DotIcon className=\"size-4\" />\n                        <span className=\"text-xs text-muted-foreground\">\n                        {(option.data_sources_total ?? 0) || <><AlertTriangleIcon className=\"inline-flex size-3 mr-0.5\" /> no</>} data sources\n                      </span>\n                      </div>\n                    </div>\n                  ) : <span key={value?.[index]}>UNKNOWN KNOWLEDGE BASE {value?.[index]}</span>\n                )) : <span className=\"pt-1 text-muted-foreground\">Select Knowledge Bases</span>\n          }\n        </PopoverPrimitive.Trigger>\n      </div>\n      <PopoverContent className={cn('p-0 focus:outline-none w-[--radix-popover-trigger-width]')} align=\"start\" collisionPadding={8}>\n        <Command>\n          <CommandInput />\n          <CommandList>\n            <CommandGroup>\n              {knowledgeBases?.map(option => (\n                <CommandItem\n                  key={option.id}\n                  value={String(option.id)}\n                  keywords={[option.name, option.description ?? '']}\n                  className={cn('group')}\n                  onSelect={idValue => {\n                    const id = knowledgeBases?.find(option => String(option.id) === idValue)?.id;\n                    if (id) {\n                      if (value?.includes(id)) {\n                        onChange?.(value.filter(v => v !== id));\n                      } else {\n                        onChange?.([...(value ?? 
[]), id]);\n                      }\n                    }\n                  }}\n                >\n                  <div className=\"space-y-1\">\n                    <div>\n                      <strong>\n                        {option.name}\n                      </strong>\n                    </div>\n                    <div className=\"text-xs text-muted-foreground flex gap-1 items-center\">\n                      <span>\n                        {(option.documents_total ?? 0) || <><AlertTriangleIcon className=\"text-warning inline-flex size-3 mr-0.5\" /> no</>} documents\n                      </span>\n                      <DotIcon className=\"size-4\" />\n                      <span>\n                        {(option.data_sources_total ?? 0) || <><AlertTriangleIcon className=\"inline-flex size-3 mr-0.5\" /> no</>} data sources\n                      </span>\n                    </div>\n                    <div className=\"text-xs text-muted-foreground\">\n                      {option.description}\n                    </div>\n                  </div>\n                  <CheckIcon className={cn('ml-auto size-4 opacity-0 flex-shrink-0', value?.includes(option.id) && 'opacity-100')} />\n                </CommandItem>\n              ))}\n            </CommandGroup>\n            <CommandEmpty className=\"text-muted-foreground/50 text-xs p-4 text-center\">\n              Empty List\n            </CommandEmpty>\n          </CommandList>\n        </Command>\n      </PopoverContent>\n    </Popover>\n  );\n}\n\nexport function KBListSelectForObjectValue ({ value, onChange, ...props }: FormControlWidgetProps<{ id: number }[], true>) {\n  return (\n    <KBListSelect\n      value={value?.map(v => v.id) ?? []}\n      onChange={value => {\n        onChange?.((value as number[]).map(id => ({ id })));\n      }}\n      {...props}\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/chat-engine/update-chat-engine-form.tsx",
    "content": "'use client';\n\nimport { type ChatEngine, type ChatEngineKnowledgeGraphOptions, type ChatEngineLLMOptions, type ChatEngineOptions, updateChatEngine } from '@/api/chat-engines';\nimport { KBListSelect } from '@/components/chat-engine/kb-list-select';\nimport { LLMSelect, RerankerSelect } from '@/components/form/biz';\nimport { FormCheckbox, FormInput, FormSwitch } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { PromptInput } from '@/components/form/widgets/PromptInput';\nimport { SecondaryNavigatorItem, SecondaryNavigatorLayout, SecondaryNavigatorList, SecondaryNavigatorMain } from '@/components/secondary-navigator-list';\nimport { fieldAccessor, GeneralSettingsField as GeneralSettingsField, type GeneralSettingsFieldAccessor, GeneralSettingsForm, shallowPick } from '@/components/settings-form';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { capitalCase } from 'change-case-all';\nimport { format } from 'date-fns';\nimport { useRouter } from 'next/navigation';\nimport { type ReactNode, useTransition } from 'react';\nimport { z } from 'zod';\n\nconst field = formFieldLayout<{ value: any | any[] }>();\n\nexport function UpdateChatEngineForm ({ chatEngine, defaultChatEngineOptions }: { chatEngine: ChatEngine, defaultChatEngineOptions: ChatEngineOptions }) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  return (\n    <GeneralSettingsForm\n      data={chatEngine}\n      readonly={false}\n      loading={transitioning}\n      onUpdate={async (data, path) => {\n        if (updatableFields.includes(path[0] as any)) {\n          const partial = shallowPick(data, path as [(typeof updatableFields)[number], ...any[]]);\n          await updateChatEngine(chatEngine.id, partial);\n          startTransition(() => {\n            router.refresh();\n          });\n        } else {\n          throw new Error(`${path.map(p => 
String(p)).join('.')} is not updatable currently.`);\n        }\n      }}\n    >\n      <SecondaryNavigatorLayout defaultValue=\"General\">\n        <SecondaryNavigatorList>\n          <SecondaryNavigatorItem value=\"General\">\n            General\n          </SecondaryNavigatorItem>\n          <SecondaryNavigatorItem value=\"Retrieval\">\n            Retrieval\n          </SecondaryNavigatorItem>\n          <SecondaryNavigatorItem value=\"Generation\">\n            Generation\n          </SecondaryNavigatorItem>\n          <SecondaryNavigatorItem value=\"Experimental\">\n            Experimental\n          </SecondaryNavigatorItem>\n          <div className=\"mt-auto pt-2 text-xs text-gray-500 space-y-1\">\n            <div className=\"flex justify-between px-3\">\n              <span>Created:</span>\n              <span>{format(chatEngine.created_at, 'yyyy-MM-dd HH:mm:ss')}</span>\n            </div>\n            <div className=\"flex justify-between px-3\">\n              <span>Updated:</span>\n              <span>{format(chatEngine.updated_at, 'yyyy-MM-dd HH:mm:ss')}</span>\n            </div>\n          </div>\n        </SecondaryNavigatorList>\n        <Section title=\"General\">\n          <GeneralSettingsField accessor={nameAccessor} schema={nameSchema}>\n            <field.Basic name=\"value\" label=\"Name\">\n              <FormInput placeholder=\"Enter chat engine name\" />\n            </field.Basic>\n          </GeneralSettingsField>\n          <GeneralSettingsField accessor={isDefaultAccessor} schema={isDefaultSchema}>\n            <field.Contained unimportant name=\"value\" label=\"Is Default\" fallbackValue={chatEngine.is_default} description=\"Set this chat engine as the default engine for new conversations\">\n              <FormSwitch />\n            </field.Contained>\n          </GeneralSettingsField>\n          <GeneralSettingsField accessor={isPublicAccessor} schema={isPublicSchema}>\n            <field.Contained unimportant name=\"value\" 
label=\"Is Public\" fallbackValue={chatEngine.is_public}>\n              <FormSwitch />\n            </field.Contained>\n          </GeneralSettingsField>\n          <SubSection title=\"Models\">\n            <GeneralSettingsField accessor={llmIdAccessor} schema={idSchema}>\n              <field.Basic name=\"value\" label=\"LLM\">\n                <LLMSelect />\n              </field.Basic>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={fastLlmIdAccessor} schema={idSchema}>\n              <field.Basic name=\"value\" label=\"Fast LLM\">\n                <LLMSelect />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n        </Section>\n\n        <Section title=\"Retrieval\">\n          <SubSection title=\"Knowledge Sources\">\n            <GeneralSettingsField accessor={kbAccessor} schema={kbSchema}>\n              <field.Basic required name=\"value\" label=\"Knowledge Bases\">\n                <KBListSelect />\n              </field.Basic>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={hideSourcesAccessor} schema={hideSourcesSchema}>\n              <field.Inline name=\"value\" label=\"Hide Sources\" fallbackValue={defaultChatEngineOptions.hide_sources} description=\"Hide knowledge sources in chat responses\">\n                <FormCheckbox />\n              </field.Inline>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Semantic Search\">\n            <GeneralSettingsField accessor={rerankerIdAccessor} schema={idSchema}>\n              <field.Basic name=\"value\" label=\"Reranker\">\n                <RerankerSelect />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Knowledge Graph\">\n            <GeneralSettingsField accessor={kgEnabledAccessor} schema={kgEnabledSchema}>\n              <field.Contained name=\"value\" label=\"Enable 
Knowledge Graph\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.enabled} description=\"Enable knowledge graph to enrich context information\">\n                <FormSwitch />\n              </field.Contained>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={kgDepthAccessor} schema={kgDepthSchema}>\n              <field.Basic name=\"value\" label=\"Depth\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.depth} description=\"Set the maximum traversal depth for knowledge graph search (higher values allow finding more distant relationships)\">\n                <FormInput type=\"number\" min={1} />\n              </field.Basic>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={kgIncludeMetaAccessor} schema={kgIncludeMetaSchema}>\n              <field.Inline name=\"value\" label=\"Include Metadata\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.include_meta} description=\"Include metadata information in knowledge graph nodes to provide additional context\">\n                <FormCheckbox />\n              </field.Inline>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={kgWithDegreeAccessor} schema={kgWithDegreeSchema}>\n              <field.Inline name=\"value\" label=\"With Degree\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.with_degree} description=\"Include entity in-degree and out-degree information in knowledge graph for weight calculation and ranking\">\n                <FormCheckbox />\n              </field.Inline>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={kgUsingIntentSearchAccessor} schema={kgUsingIntentSearchSchema}>\n              <field.Inline name=\"value\" label=\"Using Intent Search\" fallbackValue={defaultChatEngineOptions.knowledge_graph?.using_intent_search} description=\"Enable intelligent search that breaks down user question into sub-questions for more comprehensive 
search results\">\n                <FormCheckbox />\n              </field.Inline>\n            </GeneralSettingsField>\n            {(['intent_graph_knowledge', 'normal_graph_knowledge'] as const).map(type => (\n              <GeneralSettingsField key={type} accessor={llmAccessor[type]} schema={llmSchema}>\n                <field.Basic name=\"value\" label={capitalCase(type)} description=\"Template for processing and extracting knowledge from graph-based traversal methods\" fallbackValue={defaultChatEngineOptions.llm?.[type]}>\n                  <PromptInput />\n                </field.Basic>\n              </GeneralSettingsField>\n            ))}\n          </SubSection>\n        </Section>\n\n        <Section title=\"Generation\">\n          <SubSection title=\"Clarify Question\">\n            <GeneralSettingsField accessor={clarifyAccessor} schema={clarifyAccessorSchema}>\n              <field.Contained unimportant name=\"value\" label=\"Clarify Question\" fallbackValue={defaultChatEngineOptions.clarify_question} description=\"Allow ChatBot to check if user input is ambiguous and ask clarifying questions\">\n                <FormSwitch />\n              </field.Contained>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={llmAccessor.clarifying_question_prompt} schema={llmSchema}>\n              <field.Basic name=\"value\" label=\"\" description=\"Prompt template for generating clarifying questions when the user's input needs more context or specificity\" fallbackValue={defaultChatEngineOptions.llm?.clarifying_question_prompt}>\n                <PromptInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Rewrite Question\">\n            <GeneralSettingsField accessor={llmAccessor.condense_question_prompt} schema={llmSchema}>\n              <field.Basic name=\"value\" label=\"\" description={promptDescriptions.condense_question_prompt} 
fallbackValue={defaultChatEngineOptions.llm?.condense_question_prompt}>\n                <PromptInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Answer Question\">\n            <GeneralSettingsField accessor={llmAccessor.text_qa_prompt} schema={llmSchema}>\n              <field.Basic name=\"value\" label=\"\" description={promptDescriptions.text_qa_prompt} fallbackValue={defaultChatEngineOptions.llm?.text_qa_prompt}>\n                <PromptInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Further Questions\">\n            <GeneralSettingsField accessor={optionAccessor('further_questions')} schema={z.boolean().nullable().optional()}>\n              <field.Contained\n                unimportant\n                name=\"value\"\n                label=\"Show Further Questions\"\n                fallbackValue={defaultChatEngineOptions.further_questions}\n                description=\"Show suggested follow-up questions after each answer\"\n              >\n                <FormSwitch />\n              </field.Contained>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={llmAccessor.further_questions_prompt} schema={llmSchema}>\n              <field.Basic name=\"value\" label=\"\" description=\"Template for generating follow-up questions to continue the conversation\" fallbackValue={defaultChatEngineOptions.llm?.further_questions_prompt}>\n                <PromptInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n        </Section>\n\n        <Section title=\"Experimental\">\n          <SubSection title=\"External Engine\">\n            <GeneralSettingsField accessor={externalEngineAccessor} schema={externalEngineSchema}>\n              <field.Basic name=\"value\" label=\"External Chat Engine API URL (StackVM)\" 
fallbackValue={defaultChatEngineOptions.external_engine_config?.stream_chat_api_url ?? ''}>\n                <FormInput />\n              </field.Basic>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={llmAccessor.generate_goal_prompt} schema={llmSchema}>\n              <field.Basic name=\"value\" label=\"Generate Goal Prompt\" description=\"Template used to generate conversation goals and objectives based on user input\" fallbackValue={defaultChatEngineOptions.llm?.generate_goal_prompt}>\n                <PromptInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n          <SubSection title=\"Post Verification\">\n            <GeneralSettingsField accessor={postVerificationUrlAccessor} schema={postVerificationUrlSchema}>\n              <field.Basic name=\"value\" label=\"Post Verification Service URL\" fallbackValue={defaultChatEngineOptions.post_verification_url ?? ''}>\n                <FormInput />\n              </field.Basic>\n            </GeneralSettingsField>\n            <GeneralSettingsField accessor={postVerificationTokenAccessor} schema={postVerificationTokenSchema}>\n              <field.Basic name=\"value\" label=\"Post Verification Service Token\" fallbackValue={defaultChatEngineOptions.post_verification_token ?? 
''}>\n                <FormInput />\n              </field.Basic>\n            </GeneralSettingsField>\n          </SubSection>\n        </Section>\n      </SecondaryNavigatorLayout>\n    </GeneralSettingsForm>\n  );\n}\n\nconst updatableFields = ['name', 'llm_id', 'fast_llm_id', 'reranker_id', 'engine_options', 'is_default', 'is_public'] as const;\n\nfunction optionAccessor<K extends keyof ChatEngineOptions> (key: K): GeneralSettingsFieldAccessor<ChatEngine, ChatEngineOptions[K]> {\n  return {\n    path: ['engine_options', key],\n    get (engine) {\n      return engine.engine_options[key];\n    },\n    set (engine, value) {\n      return {\n        ...engine,\n        engine_options: {\n          ...engine.engine_options,\n          [key]: value,\n        },\n      };\n    },\n  };\n}\n\nfunction kgOptionAccessor<K extends keyof ChatEngineKnowledgeGraphOptions> (key: K): GeneralSettingsFieldAccessor<ChatEngine, ChatEngineKnowledgeGraphOptions[K]> {\n  return {\n    path: ['engine_options', 'knowledge_graph', key],\n    get (engine) {\n      return engine.engine_options.knowledge_graph?.[key];\n    },\n    set (engine, value) {\n      return {\n        ...engine,\n        engine_options: {\n          ...engine.engine_options,\n          knowledge_graph: {\n            ...engine.engine_options.knowledge_graph,\n            [key]: value,\n          },\n        },\n      };\n    },\n  };\n}\n\nfunction llmOptionAccessor<K extends keyof ChatEngineLLMOptions> (key: K): GeneralSettingsFieldAccessor<ChatEngine, ChatEngineLLMOptions[K]> {\n  return {\n    path: ['engine_options', 'llm', key],\n    get (engine) {\n      return engine.engine_options.llm?.[key];\n    },\n    set (engine, value) {\n      return {\n        ...engine,\n        engine_options: {\n          ...engine.engine_options,\n          llm: {\n            ...engine.engine_options.llm,\n            [key]: value,\n          },\n        },\n      };\n    },\n  };\n}\n\nconst getDatetimeAccessor = (key: 
KeyOfType<ChatEngine, Date>): GeneralSettingsFieldAccessor<ChatEngine, string> => {\n  return {\n    path: [key],\n    get (data) {\n      return format(data[key], 'yyyy-MM-dd HH:mm:ss');\n    },\n    set () {\n      throw new Error(`update ${key} is not supported`);\n    },\n  };\n};\n\nconst idAccessor = fieldAccessor<ChatEngine, 'id'>('id');\n\nconst createdAccessor = getDatetimeAccessor('created_at');\nconst updatedAccessor = getDatetimeAccessor('updated_at');\nconst neverSchema = z.never();\n\nconst nameAccessor = fieldAccessor<ChatEngine, 'name'>('name');\nconst nameSchema = z.string().min(1);\n\nconst clarifyAccessor = optionAccessor('clarify_question');\nconst clarifyAccessorSchema = z.boolean().nullable().optional();\n\nconst isDefaultAccessor = fieldAccessor<ChatEngine, 'is_default'>('is_default');\nconst isDefaultSchema = z.boolean();\n\nconst isPublicAccessor = fieldAccessor<ChatEngine, 'is_public'>('is_public');\nconst isPublicSchema = z.boolean();\n\nconst getIdAccessor = (id: KeyOfType<ChatEngine, number | null>) => fieldAccessor<ChatEngine, KeyOfType<ChatEngine, number | null>>(id);\nconst idSchema = z.number().nullable();\nconst llmIdAccessor = getIdAccessor('llm_id');\nconst fastLlmIdAccessor = getIdAccessor('fast_llm_id');\nconst rerankerIdAccessor = getIdAccessor('reranker_id');\n\nconst kbAccessor: GeneralSettingsFieldAccessor<ChatEngine, number[] | null> = {\n  path: ['engine_options'],\n  get (data) {\n    console.log(data.engine_options.knowledge_base?.linked_knowledge_bases?.map(kb => kb.id) ?? null);\n    return data.engine_options.knowledge_base?.linked_knowledge_bases?.map(kb => kb.id) ?? null;\n  },\n  set (data, value) {\n    return {\n      ...data,\n      engine_options: {\n        ...data.engine_options,\n        knowledge_base: {\n          linked_knowledge_base: undefined,\n          linked_knowledge_bases: value?.map(id => ({ id })) ?? 
null,\n        },\n      },\n    };\n  },\n};\nconst kbSchema = z.number().array().min(1);\n\nconst kgEnabledAccessor = kgOptionAccessor('enabled');\nconst kgEnabledSchema = z.boolean().nullable();\n\nconst kgWithDegreeAccessor = kgOptionAccessor('with_degree');\nconst kgWithDegreeSchema = z.boolean().nullable();\n\nconst kgIncludeMetaAccessor = kgOptionAccessor('include_meta');\nconst kgIncludeMetaSchema = z.boolean().nullable();\n\nconst kgUsingIntentSearchAccessor = kgOptionAccessor('using_intent_search');\nconst kgUsingIntentSearchSchema = z.boolean().nullable();\n\nconst kgDepthAccessor = kgOptionAccessor('depth');\nconst kgDepthSchema = z.number().int().min(1).nullable();\n\nconst hideSourcesAccessor = optionAccessor('hide_sources');\nconst hideSourcesSchema = z.boolean().nullable();\n\nconst llmPromptFields = [\n  'condense_question_prompt',\n  'text_qa_prompt',\n  'intent_graph_knowledge',\n  'normal_graph_knowledge',\n  'clarifying_question_prompt',\n  'generate_goal_prompt',\n  'further_questions_prompt',\n] as const;\n\nconst llmAccessor: { [P in (typeof llmPromptFields[number])]: GeneralSettingsFieldAccessor<ChatEngine, string | null> } = Object.fromEntries(llmPromptFields.map(name => [name, llmOptionAccessor(name)])) as never;\nconst llmSchema = z.string().nullable();\n\nconst postVerificationUrlAccessor = optionAccessor('post_verification_url');\nconst postVerificationUrlSchema = z.string().nullable();\n\nconst postVerificationTokenAccessor = optionAccessor('post_verification_token');\nconst postVerificationTokenSchema = z.string().nullable();\n\nconst externalEngineAccessor: GeneralSettingsFieldAccessor<ChatEngine, string | null> = {\n  path: ['engine_options'],\n  get (engine) {\n    return engine.engine_options.external_engine_config?.stream_chat_api_url ?? 
null;\n  },\n  set (engine, value) {\n    return {\n      ...engine,\n      engine_options: {\n        ...engine.engine_options,\n        external_engine_config: {\n          stream_chat_api_url: value,\n        },\n      },\n    };\n  },\n};\nconst externalEngineSchema = z.string().nullable();\n\nfunction Section ({ title, children }: { title: string, children: ReactNode }) {\n  return (\n    <>\n      <SecondaryNavigatorMain className=\"max-w-screen-sm space-y-8 px-2 pb-8\" value={title} strategy=\"mount\">\n        {children}\n      </SecondaryNavigatorMain>\n    </>\n  );\n}\n\nfunction SubSection ({ title, children }: { title: ReactNode, children: ReactNode }) {\n  return (\n    <section className=\"space-y-4\">\n      <h4 className=\"text-lg\">{title}</h4>\n      {children}\n    </section>\n  );\n}\n\nconst promptDescriptions: Record<typeof llmPromptFields[number], string> = {\n  'condense_question_prompt': 'Prompt template for condensing a conversation history and follow-up question into a standalone question',\n  'text_qa_prompt': 'Prompt template for generating answers based on provided context and question',\n  'intent_graph_knowledge': 'Prompt template for processing and extracting knowledge from graph-based traversal methods',\n  'normal_graph_knowledge': 'Prompt template for processing and extracting knowledge from graph-based traversal methods',\n  'clarifying_question_prompt': 'Prompt template for generating clarifying questions when the user\\'s input needs more context or specificity',\n  'generate_goal_prompt': 'Prompt template for generating conversation goals and objectives based on user input',\n  'further_questions_prompt': 'Prompt template for generating follow-up questions to continue the conversation',\n};\n"
  },
  {
    "path": "frontend/app/src/components/code-theme.scss",
    "content": "pre code.hljs {\n  display: block;\n  overflow-x: auto;\n  padding: 1em\n}\ncode.hljs {\n  padding: 3px 5px\n}\n/*!\n  Theme: GitHub\n  Description: Light theme as seen on github.com\n  Author: github.com\n  Maintainer: @Hirse\n  Updated: 2021-05-15\n\n  Outdated base version: https://github.com/primer/github-syntax-light\n  Current colors taken from GitHub's CSS\n*/\n.hljs {\n  color: #24292e;\n  background: #ffffff\n}\n.hljs-doctag,\n.hljs-keyword,\n.hljs-meta .hljs-keyword,\n.hljs-template-tag,\n.hljs-template-variable,\n.hljs-type,\n.hljs-variable.language_ {\n  /* prettylights-syntax-keyword */\n  color: #d73a49\n}\n.hljs-title,\n.hljs-title.class_,\n.hljs-title.class_.inherited__,\n.hljs-title.function_ {\n  /* prettylights-syntax-entity */\n  color: #6f42c1\n}\n.hljs-attr,\n.hljs-attribute,\n.hljs-literal,\n.hljs-meta,\n.hljs-number,\n.hljs-operator,\n.hljs-variable,\n.hljs-selector-attr,\n.hljs-selector-class,\n.hljs-selector-id {\n  /* prettylights-syntax-constant */\n  color: #005cc5\n}\n.hljs-regexp,\n.hljs-string,\n.hljs-meta .hljs-string {\n  /* prettylights-syntax-string */\n  color: #032f62\n}\n.hljs-built_in,\n.hljs-symbol {\n  /* prettylights-syntax-variable */\n  color: #e36209\n}\n.hljs-comment,\n.hljs-code,\n.hljs-formula {\n  /* prettylights-syntax-comment */\n  color: #6a737d\n}\n.hljs-name,\n.hljs-quote,\n.hljs-selector-tag,\n.hljs-selector-pseudo {\n  /* prettylights-syntax-entity-tag */\n  color: #22863a\n}\n.hljs-subst {\n  /* prettylights-syntax-storage-modifier-import */\n  color: #24292e\n}\n.hljs-section {\n  /* prettylights-syntax-markup-heading */\n  color: #005cc5;\n  font-weight: bold\n}\n.hljs-bullet {\n  /* prettylights-syntax-markup-list */\n  color: #735c0f\n}\n.hljs-emphasis {\n  /* prettylights-syntax-markup-italic */\n  color: #24292e;\n  font-style: italic\n}\n.hljs-strong {\n  /* prettylights-syntax-markup-bold */\n  color: #24292e;\n  font-weight: bold\n}\n.hljs-addition {\n  /* 
prettylights-syntax-markup-inserted */\n  color: #22863a;\n  background-color: #f0fff4\n}\n.hljs-deletion {\n  /* prettylights-syntax-markup-deleted */\n  color: #b31d28;\n  background-color: #ffeef0\n}\n.hljs-char.escape_,\n.hljs-link,\n.hljs-params,\n.hljs-property,\n.hljs-punctuation,\n.hljs-tag {\n  /* purposely ignored */\n\n}\n\n.dark {\n  pre code.hljs {\n    display: block;\n    overflow-x: auto;\n    padding: 1em\n  }\n  code.hljs {\n    padding: 3px 5px\n  }\n  /*!\n    Theme: GitHub Dark\n    Description: Dark theme as seen on github.com\n    Author: github.com\n    Maintainer: @Hirse\n    Updated: 2021-05-15\n\n    Outdated base version: https://github.com/primer/github-syntax-dark\n    Current colors taken from GitHub's CSS\n  */\n  .hljs {\n    color: #c9d1d9;\n    background: #0d1117\n  }\n  .hljs-doctag,\n  .hljs-keyword,\n  .hljs-meta .hljs-keyword,\n  .hljs-template-tag,\n  .hljs-template-variable,\n  .hljs-type,\n  .hljs-variable.language_ {\n    /* prettylights-syntax-keyword */\n    color: #ff7b72\n  }\n  .hljs-title,\n  .hljs-title.class_,\n  .hljs-title.class_.inherited__,\n  .hljs-title.function_ {\n    /* prettylights-syntax-entity */\n    color: #d2a8ff\n  }\n  .hljs-attr,\n  .hljs-attribute,\n  .hljs-literal,\n  .hljs-meta,\n  .hljs-number,\n  .hljs-operator,\n  .hljs-variable,\n  .hljs-selector-attr,\n  .hljs-selector-class,\n  .hljs-selector-id {\n    /* prettylights-syntax-constant */\n    color: #79c0ff\n  }\n  .hljs-regexp,\n  .hljs-string,\n  .hljs-meta .hljs-string {\n    /* prettylights-syntax-string */\n    color: #a5d6ff\n  }\n  .hljs-built_in,\n  .hljs-symbol {\n    /* prettylights-syntax-variable */\n    color: #ffa657\n  }\n  .hljs-comment,\n  .hljs-code,\n  .hljs-formula {\n    /* prettylights-syntax-comment */\n    color: #8b949e\n  }\n  .hljs-name,\n  .hljs-quote,\n  .hljs-selector-tag,\n  .hljs-selector-pseudo {\n    /* prettylights-syntax-entity-tag */\n    color: #7ee787\n  }\n  .hljs-subst {\n    /* 
prettylights-syntax-storage-modifier-import */\n    color: #c9d1d9\n  }\n  .hljs-section {\n    /* prettylights-syntax-markup-heading */\n    color: #1f6feb;\n    font-weight: bold\n  }\n  .hljs-bullet {\n    /* prettylights-syntax-markup-list */\n    color: #f2cc60\n  }\n  .hljs-emphasis {\n    /* prettylights-syntax-markup-italic */\n    color: #c9d1d9;\n    font-style: italic\n  }\n  .hljs-strong {\n    /* prettylights-syntax-markup-bold */\n    color: #c9d1d9;\n    font-weight: bold\n  }\n  .hljs-addition {\n    /* prettylights-syntax-markup-inserted */\n    color: #aff5b4;\n    background-color: #033a16\n  }\n  .hljs-deletion {\n    /* prettylights-syntax-markup-deleted */\n    color: #ffdcd7;\n    background-color: #67060c\n  }\n  .hljs-char.escape_,\n  .hljs-link,\n  .hljs-params,\n  .hljs-property,\n  .hljs-punctuation,\n  .hljs-tag {\n    /* purposely ignored */\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/config-viewer.tsx",
    "content": "'use client';\n\nimport Highlight from 'highlight.js/lib/core';\nimport json from 'highlight.js/lib/languages/json';\nimport { useEffect, useState } from 'react';\nimport './code-theme.scss';\n\nHighlight.registerLanguage('json', json);\n\nexport function ConfigViewer ({ value: propValue }: { value: any }) {\n  const [value, setValue] = useState(() => {\n    if (propValue === undefined) {\n      return '';\n    }\n    try {\n      return JSON.stringify(propValue, undefined, 2);\n    } catch {\n      return '/// FAILED TO STRINGIFY JSON';\n    }\n  });\n\n  useEffect(() => {\n    if (propValue === undefined) {\n      return;\n    }\n    try {\n      const string = JSON.stringify(propValue, undefined, 2);\n      setValue(string);\n      const { value: result } = Highlight.highlight(string, { language: 'json' });\n      setValue(result);\n    } catch {\n    }\n  }, [propValue]);\n\n  return (\n    <code className=\"block p-2 rounded bg-accent\">\n      <pre className=\"whitespace-pre-wrap text-xs font-mono\" dangerouslySetInnerHTML={{ __html: value }} />\n    </code>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/copy-button.tsx",
    "content": "'use client';\n\nimport { Button, type ButtonProps } from '@/components/ui/button';\nimport { cn } from '@/lib/utils';\nimport copy from 'copy-to-clipboard';\nimport { ClipboardCheckIcon, ClipboardIcon } from 'lucide-react';\nimport { useLayoutEffect, useState } from 'react';\n\nexport interface CopyButtonProps extends Omit<ButtonProps, 'children' | 'type'> {\n  text: string | (() => string);\n  autoCopy?: boolean;\n}\n\nexport function CopyButton ({ text, className, onClick, autoCopy, ...props }: CopyButtonProps) {\n  const [copied, setCopied] = useState(false);\n\n  useLayoutEffect(() => {\n    setCopied(false);\n    if (autoCopy) {\n      setCopied(copy(typeof text === 'string' ? text : text()));\n    }\n  }, [text]);\n\n  return (\n    <Button\n      size=\"icon\"\n      variant=\"ghost\"\n      className={cn('rounded-full w-7 h-7 transition-colors text-foreground', copied && 'text-success hover:text-success hover:bg-success/10')}\n      onClick={(event) => {\n        onClick?.(event);\n        if (!event.defaultPrevented) {\n          setCopied(copy(typeof text === 'string' ? text : text()));\n        }\n      }}\n      {...props}\n      type=\"button\"\n    >\n      {copied\n        ? <ClipboardCheckIcon className=\"w-4 h-4\" />\n        : <ClipboardIcon className=\"w-4 h-4\" />}\n    </Button>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/dangerous-action-button.tsx",
    "content": "import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent, AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle, AlertDialogTrigger } from '@/components/ui/alert-dialog';\nimport { Button, type ButtonProps, buttonVariants } from '@/components/ui/button';\nimport { getErrorMessage } from '@/lib/errors';\nimport { cn } from '@/lib/utils';\nimport { AlertTriangleIcon, Loader2Icon } from 'lucide-react';\nimport { forwardRef, MouseEvent, type ReactNode, useState } from 'react';\n\nexport interface DangerousActionButtonProps extends ButtonProps {\n  action: () => Promise<void>;\n  dialogTitle?: ReactNode;\n  dialogDescription?: ReactNode;\n  actionDisabled?: boolean;\n  actionDisabledReason?: ReactNode;\n}\n\nexport const DangerousActionButton = forwardRef<HTMLButtonElement, DangerousActionButtonProps>(({ action, dialogDescription, dialogTitle, actionDisabledReason, actionDisabled, asChild, ...props }, ref) => {\n  const [open, setOpen] = useState(false);\n  const [acting, setActing] = useState(false);\n  const [error, setError] = useState<unknown>();\n\n  const handleClick = (event: MouseEvent<HTMLButtonElement>) => {\n    event.preventDefault();\n    setActing(true);\n    action()\n      .then(() => {\n        setOpen(false);\n      })\n      .catch(error => setError(error))\n      .finally(() => setActing(false));\n  };\n\n  return (\n    <AlertDialog open={open} onOpenChange={setOpen}>\n      {asChild\n        ? <AlertDialogTrigger asChild ref={ref} {...props} disabled={props.disabled || acting} />\n        : (\n          <AlertDialogTrigger asChild>\n            <Button variant=\"destructive\" ref={ref} {...props} disabled={props.disabled || acting} />\n          </AlertDialogTrigger>\n        )}\n      <AlertDialogContent>\n        <AlertDialogHeader>\n          <AlertDialogTitle>{dialogTitle ?? 
'Are you absolutely sure?'}</AlertDialogTitle>\n          <AlertDialogDescription>\n            {dialogDescription ?? 'This action cannot be undone.'}\n          </AlertDialogDescription>\n        </AlertDialogHeader>\n        {!!error && <Alert variant=\"destructive\" className={cn('transition-opacity', acting && 'opacity-50')}>\n          <AlertTriangleIcon />\n          <AlertTitle>Failed to perform operation</AlertTitle>\n          <AlertDescription>{getErrorMessage(error)}</AlertDescription>\n        </Alert>}\n        {actionDisabled && actionDisabledReason}\n        <AlertDialogFooter>\n          <AlertDialogCancel className={cn('border-none', buttonVariants({ variant: 'ghost' }))}>Cancel</AlertDialogCancel>\n          <AlertDialogAction className={buttonVariants({ variant: 'destructive' })} disabled={actionDisabled || acting} onClick={handleClick}>\n            {acting && <Loader2Icon className=\"size-4 mr-1 animate-spin repeat-infinite\" />}\n            Continue\n          </AlertDialogAction>\n        </AlertDialogFooter>\n      </AlertDialogContent>\n    </AlertDialog>\n  );\n});\n\nDangerousActionButton.displayName = 'DangerousActionButton';\n"
  },
  {
    "path": "frontend/app/src/components/data-table-heading.tsx",
    "content": "import type { ReactNode } from 'react';\n\nexport function DataTableHeading ({ children }: { children: ReactNode }) {\n  return (\n    <div className=\"flex items-center gap-2\">\n      {children}\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/data-table-remote.tsx",
    "content": "import { Loader } from '@/components/loader';\nimport { RowCheckbox } from '@/components/row-checkbox';\nimport { Button } from '@/components/ui/button';\nimport { Pagination, PaginationContent, PaginationEllipsis, PaginationItem } from '@/components/ui/pagination';\nimport { Select, SelectContent, SelectItem, SelectTrigger } from '@/components/ui/select';\n\nimport { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from '@/components/ui/table';\nimport { TooltipProvider } from '@/components/ui/tooltip';\nimport { DataTableProvider } from '@/components/use-data-table';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { Page, PageParams } from '@/lib/request';\nimport { cn } from '@/lib/utils';\nimport { ColumnDef, type ColumnFilter, flexRender, getCoreRowModel, getSortedRowModel, SortingState, Table as ReactTable, useReactTable } from '@tanstack/react-table';\nimport type { CellContext, PaginationState, RowData } from '@tanstack/table-core';\nimport { ChevronLeft, ChevronRight } from 'lucide-react';\nimport { Fragment, type ReactNode, useEffect, useMemo, useState } from 'react';\nimport useSWR from 'swr';\n\ndeclare module '@tanstack/table-core' {\n  interface ColumnMeta<TData extends RowData, TValue> {\n    colSpan?: number | ((context: CellContext<TData, TValue>) => number);\n  }\n}\n\nexport interface PageApiOptions {\n  globalFilter: string;\n}\n\ninterface DataTableRemoteProps<TData, TValue> {\n  idColumn: keyof TData;\n  apiKey: string;\n  api: (page: PageParams, options: PageApiOptions) => Promise<Page<TData>>;\n  apiDeps?: unknown[];\n  columns: ColumnDef<TData, TValue>[];\n  selectable?: boolean;\n  batchOperations?: (rows: string[], revalidate: () => void) => ReactNode;\n  refreshInterval?: number | ((data: Page<TData> | undefined) => number);\n  /**\n   * @deprecated\n   */\n  before?: ReactNode;\n  /**\n   * @deprecated\n   */\n  after?: ReactNode;\n  toolbar?: (table: ReactTable<TData>) => ReactNode;\n  
defaultSorting?: SortingState;\n}\n\nexport function DataTableRemote<TData, TValue> ({\n  idColumn,\n  api,\n  apiKey,\n  columns,\n  apiDeps = [],\n  selectable = false,\n  batchOperations,\n  refreshInterval,\n  before,\n  after,\n  toolbar,\n  defaultSorting = [],\n}: DataTableRemoteProps<TData, TValue>) {\n  const [pagination, setPagination] = useState<PaginationState>(() => {\n    return { pageIndex: 0, pageSize: 10 };\n  });\n  const [rowSelection, setRowSelection] = useState({});\n  const [columnFilters, setColumnFilters] = useState<ColumnFilter[]>([]);\n  const [globalFilter, setGlobalFilter] = useState('');\n  const [sorting, setSorting] = useState<SortingState>(defaultSorting);\n\n  const idSelection = useMemo(() => {\n    return Object.keys(rowSelection);\n  }, [rowSelection]);\n\n  // Fetch data.\n  const { data, mutate, error, isLoading, isValidating } = useSWR(`${apiKey}?page=${pagination.pageIndex}&size=${pagination.pageSize}${globalFilter && `&query=${globalFilter}`}`, () => api({ page: pagination.pageIndex + 1, size: pagination.pageSize }, { globalFilter }), {\n    refreshInterval,\n    revalidateOnReconnect: false,\n    revalidateOnFocus: false,\n    focusThrottleInterval: 1000,\n    keepPreviousData: true,\n    onError: console.error,\n  });\n\n  useEffect(() => {\n    void mutate();\n  }, [pagination.pageSize, pagination.pageIndex, globalFilter, ...apiDeps]);\n\n  // Column definitions.\n  columns = useMemo(() => {\n    if (!selectable) {\n      return columns;\n    }\n\n    return [\n      {\n        id: 'select',\n        header: ({ table }) => (\n          <RowCheckbox\n            onClick={table.getToggleAllRowsSelectedHandler()}\n            checked={table.getIsAllRowsSelected()}\n            indeterminate={table.getIsSomeRowsSelected()}\n          />\n        ),\n        cell: ({ row }) => (\n          <div>\n            <RowCheckbox\n              onClick={row.getToggleSelectedHandler()}\n              checked={row.getIsSelected()}\n      
        indeterminate={row.getIsSomeSelected()}\n              disabled={!row.getCanSelect()}\n            />\n          </div>\n        ),\n      },\n      ...columns,\n    ];\n  }, [columns, selectable]);\n\n  const table = useReactTable({\n    data: data?.items ?? [],\n    columns,\n    state: {\n      sorting,\n      pagination,\n      rowSelection,\n      columnFilters,\n      globalFilter,\n    },\n    pageCount: data ? data.pages : 1,\n    manualPagination: true,\n    manualFiltering: true,\n    enableRowSelection: selectable,\n    enableMultiRowSelection: selectable,\n    enableColumnFilters: true,\n    enableGlobalFilter: true,\n    onSortingChange: async (val) => {\n      await mutate();\n      setSorting(val);\n    },\n    onPaginationChange: setPagination,\n    onRowSelectionChange: setRowSelection,\n    onColumnFiltersChange: setColumnFilters,\n    onGlobalFilterChange: setGlobalFilter,\n    getSortedRowModel: getSortedRowModel(),\n    getCoreRowModel: getCoreRowModel(),\n    getRowId: item => String(item[idColumn]),\n  });\n\n  return (\n    <DataTableProvider\n      value={{\n        ...table,\n        reload: () => { mutate(); },\n        loading: isLoading,\n      }}\n    >\n      {before}\n      {toolbar ? toolbar(table) : null}\n      <TooltipProvider>\n        <div className=\"rounded-md border relative\">\n          <Table className=\"text-xs whitespace-nowrap\">\n            <TableHeader>\n              {table.getHeaderGroups().map((headerGroup) => (\n                <TableRow key={headerGroup.id}>\n                  {headerGroup.headers.map((header) => {\n                    return (\n                      <TableHead key={header.id} colSpan={header.colSpan}>\n                        {header.isPlaceholder\n                          ? 
null\n                          : flexRender(\n                            header.column.columnDef.header,\n                            header.getContext(),\n                          )}\n                      </TableHead>\n                    );\n                  })}\n                </TableRow>\n              ))}\n            </TableHeader>\n            <TableBody>\n              {table.getRowModel().rows?.length ? (\n                table.getRowModel().rows.map((row) => (\n                  <TableRow\n                    key={row.id}\n                    data-state={row.getIsSelected() && 'selected'}\n                  >\n                    {row.getVisibleCells().map((cell) => {\n                      // Col span for advanced customization.\n                      const span = getColSpan(cell.column.columnDef, cell.getContext());\n\n                      if (span === 0) {\n                        return <Fragment key={cell.id} />;\n                      }\n\n                      return (\n                        <TableCell key={cell.id} colSpan={span}>\n                          {flexRender(cell.column.columnDef.cell, cell.getContext())}\n                        </TableCell>\n                      );\n                    })}\n                  </TableRow>\n                ))\n              ) : (\n                <TableRow>\n                  <TableCell colSpan={columns.length} className={cn('h-24 text-center', !!error && 'text-destructive')}>\n                    {error\n                      ? 
`Failed to load data: ${getErrorMessage(error)}`\n                      : 'Empty List'}\n                  </TableCell>\n                </TableRow>\n              )}\n            </TableBody>\n          </Table>\n          <Loader loading={isLoading || isValidating} />\n        </div>\n        <div className=\"flex w-full gap-2 py-4\">\n          {selectable && (\n            <>\n              <span className=\"text-xs text-secondary-foreground\">\n                Selected {Object.keys(rowSelection).length} rows\n              </span>\n              {batchOperations?.(idSelection, () => mutate())}\n            </>\n          )}\n          <TablePagination className=\"mx-0 ml-auto w-max\" loading={isLoading} table={table} />\n        </div>\n      </TooltipProvider>\n      {after}\n    </DataTableProvider>\n  );\n}\n\nfunction getSortingSearchString (sorting: SortingState) {\n  return sorting.map(({ id, desc }) => `${id}:${desc ? 'desc' : 'asc'}`).join(',');\n}\n\nconst sizes = [10, 20, 50, 100];\n\nfunction TablePagination ({ className, limit = 4, loading, table }: { className?: string, limit?: number, loading: boolean, table: ReactTable<any> }) {\n  const options = table.getPageOptions();\n  const pagination = table.getState().pagination;\n\n  const min = Math.max(pagination.pageIndex - limit / 2, 0);\n  const max = Math.min(min + limit + 1, options.length - 1);\n\n  if (min >= max) {\n    return <span className={className} />;\n  }\n\n  return (\n    <Pagination className={className}>\n      <Select value={String(pagination.pageSize)} onValueChange={value => table.setPageSize(Number(value))}>\n        <SelectTrigger className=\"w-max\">\n          {pagination.pageSize} / page\n        </SelectTrigger>\n        <SelectContent>\n          {sizes.map(size => (\n            <SelectItem key={size} value={String(size)}>\n              {size}\n            </SelectItem>\n          ))}\n        </SelectContent>\n      </Select>\n      <PaginationContent>\n        
<PaginationItem>\n          <Button variant=\"ghost\" size=\"icon\" disabled={loading || !table.getCanPreviousPage()} onClick={() => table.previousPage()}>\n            <ChevronLeft />\n          </Button>\n        </PaginationItem>\n        {min > 0 && (\n          <PaginationItem>\n            <Button variant=\"ghost\" size=\"icon\" disabled={loading} onClick={() => table.setPageIndex(0)}>\n              1\n            </Button>\n          </PaginationItem>\n        )}\n        {min > 1 && (\n          <PaginationItem>\n            <PaginationEllipsis />\n          </PaginationItem>\n        )}\n        {steps(min, max).map((page) => (\n          <PaginationItem key={page}>\n            <Button\n              variant={page === pagination.pageIndex ? 'outline' : 'ghost'}\n              disabled={loading}\n              size=\"icon\"\n              onClick={() => table.setPageIndex(page)}\n            >\n              {page + 1}\n            </Button>\n          </PaginationItem>\n        ))}\n        {(max < options.length - 2) && (\n          <PaginationItem>\n            <PaginationEllipsis />\n          </PaginationItem>\n        )}\n        {(max < options.length - 1) && (\n          <PaginationItem>\n            <Button variant=\"ghost\" size=\"icon\" disabled={loading} onClick={() => table.setPageIndex(options.length - 1)}>\n              {options.length}\n            </Button>\n          </PaginationItem>\n        )}\n        <PaginationItem>\n          <Button variant=\"ghost\" size=\"icon\" disabled={loading || !table.getCanNextPage()} onClick={() => table.nextPage()}>\n            <ChevronRight />\n          </Button>\n        </PaginationItem>\n      </PaginationContent>\n    </Pagination>\n  );\n}\n\nfunction steps (from: number, to: number) {\n  if (from >= to) {\n    return [];\n  }\n  let arr = new Array(to - from + 1);\n  for (let i = from; i <= to; i++) {\n    arr[i - from] = i;\n  }\n\n  return arr;\n}\n\nfunction getColSpan<TData extends 
RowData, TValue> (columnDef: ColumnDef<TData, TValue>, context: CellContext<TData, TValue>) {\n  const colSpan = columnDef.meta?.colSpan;\n  if (colSpan == null) {\n    return undefined;\n  }\n  if (typeof colSpan === 'number') {\n    return colSpan;\n  }\n  return colSpan(context);\n}\n"
  },
  {
    "path": "frontend/app/src/components/data-table.tsx",
    "content": "'use client';\n\nimport { Loader } from '@/components/loader';\nimport { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from '@/components/ui/table';\nimport { DataTableProvider } from '@/components/use-data-table';\nimport { cn } from '@/lib/utils';\nimport { ColumnDef, flexRender, getCoreRowModel, useReactTable } from '@tanstack/react-table';\nimport type { ReactNode } from 'react';\n\ninterface DataTableClassNames {\n  table?: string;\n  tr?: string;\n  td?: string;\n  headTd?: string;\n  headTr?: string;\n}\n\ninterface DataTableProps<TData, TValue> {\n  className?: string;\n  before?: ReactNode;\n  after?: ReactNode;\n  hideHeader?: boolean;\n  columns: ColumnDef<TData, TValue>[];\n  data: TData[];\n  classNames?: DataTableClassNames;\n  loading?: boolean\n}\n\nexport function DataTable<TData, TValue> ({\n  className,\n  hideHeader,\n  columns,\n  data,\n  before,\n  after,\n  classNames,\n  loading,\n}: DataTableProps<TData, TValue>) {\n  const table = useReactTable({\n    data,\n    columns,\n    getCoreRowModel: getCoreRowModel(),\n  });\n\n  return (\n    <DataTableProvider value={table}>\n      {before}\n      <div className={cn('rounded-md border relative', className)}>\n        <Table className=\"text-xs whitespace-nowrap\">\n          {!hideHeader && <TableHeader>\n            {table.getHeaderGroups().map((headerGroup) => (\n              <TableRow key={headerGroup.id} className={classNames?.headTd}>\n                {headerGroup.headers.map((header) => {\n                  return (\n                    <TableHead key={header.id} className={classNames?.headTr}>\n                      {header.isPlaceholder\n                        ? 
null\n                        : flexRender(\n                          header.column.columnDef.header,\n                          header.getContext(),\n                        )}\n                    </TableHead>\n                  );\n                })}\n              </TableRow>\n            ))}\n          </TableHeader>}\n          <TableBody>\n            {table.getRowModel().rows?.length ? (\n              table.getRowModel().rows.map((row) => (\n                <TableRow\n                  key={row.id}\n                  className={classNames?.td}\n                  data-state={row.getIsSelected() && 'selected'}\n                >\n                  {row.getVisibleCells().map((cell) => (\n                    <TableCell key={cell.id} className={classNames?.td}>\n                      {flexRender(cell.column.columnDef.cell, cell.getContext())}\n                    </TableCell>\n                  ))}\n                </TableRow>\n              ))\n            ) : (\n              <TableRow>\n                <TableCell colSpan={columns.length} className=\"h-24 text-center\">\n                  No results.\n                </TableCell>\n              </TableRow>\n            )}\n          </TableBody>\n        </Table>\n        <Loader loading={!!loading} />\n      </div>\n      {after}\n    </DataTableProvider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/datasource/create-datasource-form.tsx",
    "content": "import { type BaseCreateDatasourceParams, createDatasource, type CreateDatasourceSpecParams, uploadFiles } from '@/api/datasources';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { FilesInput } from '@/components/form/widgets/FilesInput';\nimport { Button } from '@/components/ui/button';\nimport { Form, formDomEventHandlers, FormField, FormItem, FormLabel, useFormContext } from '@/components/ui/form.beta';\nimport { ToggleGroup, ToggleGroupItem } from '@/components/ui/toggle-group';\nimport { zodFile } from '@/lib/zod';\nimport { useForm } from '@tanstack/react-form';\nimport { FileDownIcon, GlobeIcon, PaperclipIcon } from 'lucide-react';\nimport { useSearchParams } from 'next/navigation';\nimport { useState } from 'react';\nimport { z } from 'zod';\n\nconst types = ['file', 'web_single_page', 'web_sitemap'] as const;\n\nconst isType = (value: string | null): value is typeof types[number] => types.includes(value as any);\n\nconst field = formFieldLayout<CreateDatasourceFormParams>();\nconst fileField = formFieldLayout<CreateDatasourceFormParams & { data_source_type: 'file' }>();\nconst sitemapField = formFieldLayout<CreateDatasourceFormParams & { data_source_type: 'web_sitemap' }>();\nconst pagesField = formFieldLayout<CreateDatasourceFormParams & { data_source_type: 'web_single_page' }>();\n\nexport function CreateDatasourceForm ({ knowledgeBaseId, transitioning, onCreated }: { knowledgeBaseId: number, transitioning?: boolean, onCreated?: () => void }) {\n  const usp = useSearchParams()!;\n  const uType = usp.get('type');\n\n  const [submissionError, setSubmissionError] = useState<unknown>(undefined);\n\n  const form = useForm<CreateDatasourceFormParams>({\n    validators: {\n      onSubmit: createDatasourceSchema,\n    },\n    
defaultValues: switchDatasource({\n      data_source_type: 'file',\n      name: '',\n      files: [],\n    }, isType(uType) ? uType : 'file'),\n    onSubmit: onSubmitHelper(createDatasourceSchema, async (data) => {\n      const createParams = await preCreate(data);\n      await createDatasource(knowledgeBaseId, createParams);\n      onCreated?.();\n    }, setSubmissionError),\n  });\n\n  return (\n    <Form form={form} disabled={transitioning} submissionError={submissionError}>\n      <form className=\"max-w-screen-sm space-y-4\" {...formDomEventHandlers(form, transitioning)}>\n        <DataSourceTypeField />\n        <DataSourceTypeSpecFields />\n        <field.Basic name=\"name\" label=\"Datasource Name\" required>\n          <FormInput />\n        </field.Basic>\n        <FormRootError title='Failed to create datasource' />\n        <Button type=\"submit\" disabled={form.state.isSubmitting}>\n          Create\n        </Button>\n      </form>\n    </Form>\n  );\n}\n\nfunction DataSourceTypeField () {\n  return (\n    <FormField<CreateDatasourceFormParams, 'data_source_type'>\n      name=\"data_source_type\"\n      render={(field, form) => (\n        <FormItem>\n          <FormLabel>\n            Data Source Type\n          </FormLabel>\n          <ToggleGroup\n            className=\"w-max\"\n            type=\"single\"\n            value={field.state.value}\n            onValueChange={(value => {\n              form.reset(switchDatasource(form.state.values, value as never));\n            })}\n            onBlur={field.handleBlur}\n          >\n            <ToggleGroupItem value=\"file\">\n              <PaperclipIcon className=\"size-4 mr-2\" />\n              File\n            </ToggleGroupItem>\n            <ToggleGroupItem value=\"web_single_page\">\n              <FileDownIcon className=\"size-4 mr-2\" />\n              Web Single Page\n            </ToggleGroupItem>\n            <ToggleGroupItem value=\"web_sitemap\">\n              <GlobeIcon 
className=\"size-4 mr-2\" />\n              Web Sitemap\n            </ToggleGroupItem>\n          </ToggleGroup>\n        </FormItem>\n      )}\n    />\n  );\n}\n\nfunction DataSourceTypeSpecFields () {\n  const { form } = useFormContext<CreateDatasourceFormParams>();\n\n  return (\n    <form.Subscribe selector={state => state.values.data_source_type}>\n      {(type) => (\n        <>\n          {type === 'file' && (\n            <fileField.Basic name=\"files\" label=\"Files\" description=\"Currently support Markdown (*.md), PDF (*.pdf), Microsoft Word (*.docx), Microsoft PowerPoint (*.pptx), Microsoft Excel (*.xlsx) and Text (*.txt) files.\" required>\n              <FilesInput accept={['text/plain', 'application/pdf', '.md', '.docx', '.pptx', '.xlsx']} />\n            </fileField.Basic>\n          )}\n          {type === 'web_single_page' && (\n            <pagesField.PrimitiveArray name=\"urls\" label=\"Page URL\" newItemValue={() => ''}>\n              <FormInput placeholder=\"https://example.com/\" required />\n            </pagesField.PrimitiveArray>\n          )}\n          {type === 'web_sitemap' && (\n            <sitemapField.Basic name=\"url\" label=\"Sitemap URL\">\n              <FormInput placeholder=\"https://example.com/sitemap.xml\" required />\n            </sitemapField.Basic>\n          )}\n        </>\n      )}\n    </form.Subscribe>\n  );\n}\n\nexport type CreateDatasourceFormParams = z.infer<typeof createDatasourceSchema>;\n\nexport const createDatasourceSchema = z.object({\n  name: z.string().trim().min(1, 'Must not blank'),\n}).and(z.discriminatedUnion('data_source_type', [\n  z.object({\n    data_source_type: z.literal('file'),\n    files: zodFile().array().min(1),\n  }),\n  z.object({\n    data_source_type: z.literal('web_single_page'),\n    urls: z.string().url().array().min(1),\n  }),\n  z.object({\n    data_source_type: z.literal('web_sitemap'),\n    url: z.string().url(),\n  }),\n]));\n\nfunction switchDatasource (data: 
CreateDatasourceFormParams, type: CreateDatasourceSpecParams['data_source_type']): CreateDatasourceFormParams {\n  if (data.data_source_type === type) {\n    return data;\n  }\n\n  switch (type) {\n    case 'file':\n      return {\n        name: data.name,\n        data_source_type: 'file',\n        files: [],\n      };\n    case 'web_single_page':\n      return {\n        name: data.name,\n        data_source_type: 'web_single_page',\n        urls: [],\n      };\n    case 'web_sitemap':\n      return {\n        name: data.name,\n        data_source_type: 'web_sitemap',\n        url: '',\n      };\n  }\n}\n\nasync function preCreate (ds: CreateDatasourceFormParams): Promise<BaseCreateDatasourceParams & CreateDatasourceSpecParams> {\n  switch (ds.data_source_type) {\n    case 'file': {\n      const { files, ...rest } = ds;\n      const uploadedFiles = await uploadFiles(ds.files);\n      return {\n        ...rest,\n        config: uploadedFiles.map(f => ({\n          file_id: f.id,\n          file_name: f.name,\n        })),\n      };\n    }\n    case 'web_single_page': {\n      const { urls, ...rest } = ds;\n\n      return {\n        ...rest,\n        config: { urls },\n      };\n    }\n\n    case 'web_sitemap':\n      const { url, ...rest } = ds;\n\n      return {\n        ...rest,\n        config: { url },\n      };\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/datasource/datasource-card.tsx",
    "content": "'use client';\n\nimport { type Datasource, deleteDatasource } from '@/api/datasources';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { UpdateDatasourceForm } from '@/components/datasource/update-datasource-form';\nimport { mutateKnowledgeBaseDataSources } from '@/components/knowledge-base/hooks';\nimport { ManagedDialog } from '@/components/managed-dialog';\nimport { ManagedPanelContext } from '@/components/managed-panel';\nimport { Button } from '@/components/ui/button';\nimport { Card, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';\nimport { DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';\nimport { FileDownIcon, GlobeIcon, PaperclipIcon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\n\nexport function DatasourceCard ({ knowledgeBaseId, datasource }: { knowledgeBaseId: number, datasource: Datasource }) {\n  const router = useRouter();\n\n  return (\n    <Card key={datasource.id}>\n      <CardHeader className=\"p-3\">\n        <CardTitle className=\"text-base\">{datasource.name}</CardTitle>\n        <CardDescription className=\"text-xs\">\n          <DatasourceCardDetails datasource={datasource} />\n        </CardDescription>\n      </CardHeader>\n      <CardFooter className=\"gap-2 p-3 pt-0\">\n        <ManagedDialog>\n          <DialogTrigger asChild>\n            <Button variant=\"ghost\" size=\"sm\">Configure</Button>\n          </DialogTrigger>\n          <DialogContent>\n            <DialogHeader>\n              <DialogTitle>Configure Datasource</DialogTitle>\n              <DialogDescription />\n            </DialogHeader>\n            <ManagedPanelContext.Consumer>\n              {({ setOpen }) => (\n                <UpdateDatasourceForm\n                  knowledgeBaseId={knowledgeBaseId}\n             
     datasource={datasource}\n                  onUpdated={() => {\n                    router.refresh();\n                    void mutateKnowledgeBaseDataSources(knowledgeBaseId);\n                    setOpen(false);\n                  }}\n                />\n              )}\n            </ManagedPanelContext.Consumer>\n          </DialogContent>\n        </ManagedDialog>\n        <DangerousActionButton\n          action={async () => {\n            await deleteDatasource(knowledgeBaseId, datasource.id);\n          }}\n          asChild\n          dialogTitle={`Confirm to delete the datasource ${datasource.name} #${datasource.id}`}\n          dialogDescription={<>All <b>documents</b>, <b>chunks</b>, <b>entities</b> and <b>relationships</b> related to this datasource will be <b>deleted</b>. This action cannot be undone.</>}\n        >\n          <Button variant=\"ghost\" className=\"hover:text-destructive hover:bg-destructive/10\" size=\"sm\">Delete</Button>\n        </DangerousActionButton>\n      </CardFooter>\n    </Card>\n  );\n}\n\nfunction DatasourceCardDetails ({ datasource }: { datasource: Datasource }) {\n  return (\n    <span className=\"flex gap-1 items-center\">\n      {(() => {\n        switch (datasource.data_source_type) {\n          case 'web_sitemap':\n            return <GlobeIcon className=\"size-3\" />;\n          case 'web_single_page':\n            return <FileDownIcon className=\"size-3\" />;\n          case 'file':\n            return <PaperclipIcon className=\"size-3\" />;\n        }\n      })()}\n      <span>\n        {(() => {\n          switch (datasource.data_source_type) {\n            case 'web_sitemap':\n              return datasource.config.url;\n            case 'web_single_page':\n              return datasource.config.urls.join(', ');\n            case 'file':\n              if (datasource.config.length === 1) {\n                return datasource.config[0].file_name;\n              } else {\n                return (\n            
      <>\n                    {datasource.config[0]?.file_name}\n                    {(datasource.config.length > 1) && <Popover>\n                      <PopoverTrigger className=\"ml-2 font-medium\">\n                        +{datasource.config.length - 1} files\n                      </PopoverTrigger>\n                      <PopoverContent className=\"flex flex-wrap gap-2 text-xs text-muted-foreground\">\n                        {datasource.config.slice(1).map(file => (\n                          <span key={file.file_id}>{file.file_name}</span>\n                        ))}\n                      </PopoverContent>\n                    </Popover>}\n                  </>\n                );\n              }\n          }\n        })()}\n      </span>\n    </span>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/datasource/datasource-create-option.tsx",
    "content": "'use client';\n\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport type { ReactNode } from 'react';\n\nexport function DatasourceCreateOption ({\n  knowledgeBaseId,\n  type,\n  icon,\n  title,\n  children,\n}: {\n  knowledgeBaseId: number\n  type: string\n  icon?: ReactNode\n  title: ReactNode\n  children?: ReactNode\n}) {\n  return (\n    <NextLink\n      href={`/knowledge-bases/${knowledgeBaseId}/data-sources/new?type=${type}`}\n      className=\"block space-y-2 h-auto\"\n      variant=\"secondary\"\n    >\n      <div className=\"break-words text-wrap\">\n        <span className=\"inline-flex items-center h-max mr-1 align-middle\">\n          {icon}\n        </span>\n        {title}\n      </div>\n      <div className=\"text-muted-foreground text-xs font-normal break-words text-wrap\">\n        {children}\n      </div>\n    </NextLink>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/datasource/no-datasource-placeholder.tsx",
    "content": "export function NoDatasourcePlaceholder () {\n  return (\n    <div className=\"rounded-lg p-8 text-center bg-muted\">\n      <span className='text-muted-foreground/50 font-bold'>\n        Empty Data Sources list\n      </span>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/datasource/update-datasource-form.tsx",
    "content": "import { type Datasource, updateDatasource } from '@/api/datasources';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { Button } from '@/components/ui/button';\nimport { Form, formDomEventHandlers } from '@/components/ui/form.beta';\nimport { useForm } from '@tanstack/react-form';\nimport { useState } from 'react';\nimport { z } from 'zod';\n\nconst schema = z.object({\n  name: z.string().min(1, 'Must not empty'),\n});\n\nconst field = formFieldLayout<typeof schema>();\n\nexport function UpdateDatasourceForm ({ knowledgeBaseId, datasource, onUpdated }: { knowledgeBaseId: number, datasource: Datasource, onUpdated?: () => void }) {\n  const [submissionError, setSubmissionError] = useState<unknown>(undefined);\n\n  const form = useForm<UpdateDatasourceFormParams>({\n    validators: {\n      onSubmit: schema,\n    },\n    defaultValues: {\n      name: datasource.name,\n    },\n    onSubmit: onSubmitHelper(schema, async data => {\n      await updateDatasource(knowledgeBaseId, datasource.id, data);\n      onUpdated?.();\n    }, setSubmissionError),\n  });\n\n  return (\n    <Form form={form} submissionError={submissionError}>\n      <form className=\"space-y-4\" {...formDomEventHandlers(form)}>\n        <field.Basic name=\"name\" label=\"Name\">\n          <FormInput />\n        </field.Basic>\n        <Button type=\"submit\" disabled={form.state.isSubmitting}>\n          Update\n        </Button>\n      </form>\n    </Form>\n  );\n}\n\ninterface UpdateDatasourceFormParams {\n  name: string;\n}\n"
  },
  {
    "path": "frontend/app/src/components/date-format.tsx",
    "content": "'use client';\n\nimport { format } from 'date-fns';\n\nexport function DateFormat ({ className, date, format: formatStr = 'yyyy-MM-dd HH:mm:ss' }: { className?: string, date: Date | null | undefined, format?: string }) {\n  return (\n    <time className={className}>\n      {date ? isNaN(date.getTime()) ? 'Invalid Date' : format(date, formatStr) : '-'}\n    </time>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/date-range-picker.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\n\nimport {\n  Popover,\n  PopoverContent,\n  PopoverTrigger,\n} from \"@/components/ui/popover\"\n\nimport { Button } from \"@/components/ui/button\"\nimport { Calendar } from \"@/components/ui/calendar\"\nimport { CalendarIcon } from \"lucide-react\"\nimport { DateRange } from \"react-day-picker\"\nimport { cn } from \"@/lib/utils\"\nimport { format } from \"date-fns\"\n\ninterface DateRangePickerProps {\n  value?: DateRange;\n  onChange?: (date: DateRange | undefined) => void;\n  placeholder?: string;\n  className?: string;\n  size?: 'sm' | 'default';\n}\n\nexport function DateRangePicker({\n  className,\n  value,\n  onChange,\n  placeholder = \"Pick a date range\",\n  size = 'default'\n}: DateRangePickerProps) {\n  return (\n    <div className={cn(\"grid gap-2\", className)}>\n      <Popover>\n        <PopoverTrigger asChild>\n          <Button\n            id=\"date\"\n            variant={\"outline\"}\n            size={size}\n            className={cn(\n              \"w-full justify-start text-left font-normal\",\n              !value && \"text-muted-foreground\",\n              size === 'sm' && \"text-sm h-8\"\n            )}\n          >\n            <CalendarIcon className={cn(\"mr-2\", size === 'sm' ? \"h-3 w-3\" : \"h-4 w-4\")} />\n            {value?.from ? (\n              value.to ? (\n                <>\n                  {format(value.from, size === 'sm' ? \"MMM d, y\" : \"LLL dd, y\")} -{\" \"}\n                  {format(value.to, size === 'sm' ? \"MMM d, y\" : \"LLL dd, y\")}\n                </>\n              ) : (\n                format(value.from, size === 'sm' ? 
\"MMM d, y\" : \"LLL dd, y\")\n              )\n            ) : (\n              <span>{placeholder}</span>\n            )}\n          </Button>\n        </PopoverTrigger>\n        <PopoverContent className=\"w-auto p-0\" align=\"start\">\n          <Calendar\n            initialFocus\n            mode=\"range\"\n            defaultMonth={value?.from}\n            selected={value}\n            onSelect={onChange}\n            numberOfMonths={size === 'sm' ? 1 : 2}\n          />\n        </PopoverContent>\n      </Popover>\n    </div>\n  )\n} "
  },
  {
    "path": "frontend/app/src/components/diff-seconds.tsx",
    "content": "import { differenceInMilliseconds } from 'date-fns';\nimport { useEffect, useState } from 'react';\n\nfunction diff (from: Date | string | number | null | undefined, to: Date | string | number | null | undefined) {\n  if (from == null) {\n    return null;\n  }\n  return (differenceInMilliseconds(to ?? new Date(), from) / 1000).toFixed(1) + 's';\n}\n\n/**\n *\n * @param className\n * @param from\n * @param to default to now\n * @constructor\n */\nexport function DiffSeconds ({ className, from, to }: { className?: string, from: Date | string | number | null | undefined, to?: Date | string | number | null | undefined }) {\n  const [seconds, setSeconds] = useState(() => diff(from, to));\n\n  useEffect(() => {\n    if (from == null) {\n      return;\n    }\n    setSeconds(diff(from, to));\n    if (to == null) {\n      const interval = setInterval(() => {\n        setSeconds(diff(from, to));\n      }, 100);\n\n      return () => {\n        clearInterval(interval);\n      };\n    }\n  }, [from, to]);\n\n  return <time className={className}>{seconds}</time>;\n}"
  },
  {
    "path": "frontend/app/src/components/document-viewer.tsx",
    "content": "import './code-theme.scss';\n\nimport { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { useEffect, useState } from 'react';\n\nimport { Button } from '@/components/ui/button';\nimport Highlight from 'highlight.js/lib/core';\nimport { ScrollArea } from '@/components/ui/scroll-area';\nimport markdown from 'highlight.js/lib/languages/markdown';\n\nexport interface DocumentPreviewProps {\n  content: string;\n  mime: string;\n}\n\nHighlight.registerLanguage('markdown', markdown);\n\nexport function DocumentViewer ({ content, mime }: DocumentPreviewProps) {\n  if (mime === 'text/markdown') {\n    return <MarkdownViewer value={content} />;\n  } else {\n    return (\n      <div className=\"whitespace-pre-wrap text-xs font-mono\">\n        {content}\n      </div>\n    );\n  }\n}\n\nconst nf = new Intl.NumberFormat('en-US');\n\nexport function DocumentPreviewDialog ({ title, name, mime, content }: { title: string, name: string, mime: string, content: string }) {\n  return (\n    <Dialog>\n      <DialogTrigger asChild>\n        <Button className=\"text-xs p-2 font-normal font-mono h-6\" variant=\"ghost\" size=\"sm\">\n          {name} <span className=\"text-muted-foreground\">({nf.format(content.length)} characters)</span>\n        </Button>\n      </DialogTrigger>\n      <DialogContent className=\"max-w-[720px] w-full\">\n        <DialogHeader>\n          <DialogTitle>\n            {title}\n          </DialogTitle>\n          <DialogDescription className=\"sr-only\" />\n        </DialogHeader>\n        <ScrollArea className=\"h-[80vh]\">\n          <DocumentViewer mime={mime} content={content} />\n        </ScrollArea>\n      </DialogContent>\n    </Dialog>\n  );\n}\n\nfunction MarkdownViewer ({ value: propValue }: { value: string }) {\n  const [value, setValue] = useState(propValue);\n\n  useEffect(() => {\n    setValue(propValue);\n    try {\n      const { value: result } = 
Highlight.highlight(propValue, { language: 'markdown' });\n      setValue(result);\n    } catch {\n    }\n  }, [propValue]);\n\n  return (\n    <code>\n      <pre className=\"whitespace-pre-wrap text-xs font-mono\" dangerouslySetInnerHTML={{ __html: value }} />\n    </code>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/documents/documents-table-filters.tsx",
    "content": "import { type Document, listDocumentsFiltersSchema, type ListDocumentsTableFilters, mimeTypes } from '@/api/documents';\nimport { indexStatuses } from '@/api/rag';\nimport { Button } from '@/components/ui/button';\nimport { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';\nimport { Form, FormControl, formDomEventHandlers, FormField, FormItem, FormLabel, FormMessage } from '@/components/ui/form.beta';\nimport { Input } from '@/components/ui/input';\nimport { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';\nimport { useForm } from '@tanstack/react-form';\nimport { Table as ReactTable } from '@tanstack/react-table';\nimport { capitalCase } from 'change-case-all';\nimport { ChevronDownIcon, UploadIcon } from 'lucide-react';\nimport { DateRangePicker } from '@/components/date-range-picker';\nimport { DateRange } from 'react-day-picker';\nimport { NextLink } from '@/components/nextjs/NextLink';\n\ninterface DocumentsTableFiltersProps {\n  knowledgeBaseId: number;\n  table: ReactTable<Document>;\n  onFilterChange: (data: ListDocumentsTableFilters) => void;\n}\n\nexport function DocumentsTableFilters ({ knowledgeBaseId, table, onFilterChange }: DocumentsTableFiltersProps) {\n  const form = useForm({\n    validators: {\n      onChange: listDocumentsFiltersSchema,\n    },\n    defaultValues: {\n      search: undefined,\n      mime_type: undefined,\n      index_status: undefined,\n    },\n    onSubmit: async ({ value }) => {\n      const filters = listDocumentsFiltersSchema.parse(value);\n      onFilterChange?.(filters);\n    },\n  });\n\n  return (\n    <Form form={form}>\n      <div className=\"flex flex-col gap-4\">\n        {/* Top row - Search and Upload */}\n        <div className=\"flex items-center justify-between\">\n          <div className=\"flex items-center gap-2\">\n            <FormField\n              name=\"search\"\n              render={(field) => (\n     
           <FormItem>\n                  <FormControl>\n                    <Input\n                      name={field.name}\n                      className=\"h-8 text-sm w-[300px]\"\n                      onBlur={field.handleBlur}\n                      onChange={ev => field.handleChange(ev.target.value)}\n                      value={field.state.value ?? ''}\n                      placeholder=\"Search documents\"\n                      onKeyDown={(e) => {\n                        if (e.key === 'Enter') {\n                          e.preventDefault();\n                          form.handleSubmit();\n                        }\n                      }}\n                    />\n                  </FormControl>\n                </FormItem>\n              )}\n            />\n            <Button \n              type=\"submit\" \n              size=\"sm\" \n              className=\"h-8 px-3\"\n              onClick={(e) => {\n                e.preventDefault();\n                form.handleSubmit();\n              }}\n            >\n              Search\n            </Button>\n          </div>\n          <div className=\"flex items-center gap-2\">\n            <NextLink\n              href={`/knowledge-bases/${knowledgeBaseId}/data-sources/new?type=file`}\n              variant=\"secondary\"\n              className=\"h-8 text-sm px-3\"\n            >\n              <UploadIcon className=\"mr-2 size-3\" />\n              Upload\n            </NextLink>\n          </div>\n        </div>\n\n        {/* Bottom row - Filters */}\n        <div className=\"flex items-center gap-2 flex-wrap\">\n\n          <FormField\n            name=\"mime_type\"\n            render={(field) => (\n              <FormItem>\n                <Select value={field.state.value ?? 
''} name={field.name} onValueChange={field.handleChange}>\n                  <SelectTrigger className=\"h-8 text-sm font-normal hover:bg-accent\" onBlur={field.handleBlur}>\n                    <SelectValue placeholder=\"Document Type\" />\n                  </SelectTrigger>\n                  <SelectContent>\n                    {mimeTypes.map(mime => (\n                      <SelectItem key={mime.value} value={mime.value}>\n                        {mime.name}\n                      </SelectItem>\n                    ))}\n                  </SelectContent>\n                </Select>\n              </FormItem>\n            )}\n          />\n\n          <FormField\n            name=\"index_status\"\n            render={(field) => (\n              <FormItem>\n                <Select value={field.state.value ?? ''} name={field.name} onValueChange={field.handleChange}>\n                  <SelectTrigger className=\"h-8 text-sm font-normal hover:bg-accent\" onBlur={field.handleBlur}>\n                    <SelectValue placeholder=\"Index Status\" />\n                  </SelectTrigger>\n                  <SelectContent>\n                    {indexStatuses.map(indexStatus => (\n                      <SelectItem key={indexStatus} value={indexStatus}>\n                        {capitalCase(indexStatus)}\n                      </SelectItem>\n                    ))}\n                  </SelectContent>\n                </Select>\n              </FormItem>\n            )}\n          />\n\n          <FormField\n            name=\"created_at\"\n            render={(field) => (\n              <FormItem>\n                <DateRangePicker\n                  value={field.state.value ? { from: field.state.value[0], to: field.state.value[1] } : undefined}\n                  onChange={(range) => field.handleChange(range ? 
[range.from, range.to] : undefined)}\n                  placeholder=\"Created Time\"\n                  size=\"sm\"\n                />\n              </FormItem>\n            )}\n          />\n\n          <FormField\n            name=\"updated_at\"\n            render={(field) => (\n              <FormItem>\n                <DateRangePicker\n                  value={field.state.value ? { from: field.state.value[0], to: field.state.value[1] } : undefined}\n                  onChange={(range) => field.handleChange(range ? [range.from, range.to] : undefined)}\n                  placeholder=\"Updated Time\"\n                  size=\"sm\"\n                />\n              </FormItem>\n            )}\n          />\n\n          <FormField\n            name=\"last_modified_at\"\n            render={(field) => (\n              <FormItem>\n                <DateRangePicker\n                  value={field.state.value ? { from: field.state.value[0], to: field.state.value[1] } : undefined}\n                  onChange={(range) => field.handleChange(range ? [range.from, range.to] : undefined)}\n                  placeholder=\"Last Modified Time\"\n                  size=\"sm\"\n                />\n              </FormItem>\n            )}\n          />\n\n          <Button \n            variant=\"ghost\" \n            className=\"text-sm font-normal h-8 px-2 hover:bg-accent\"\n            onClick={() => form.reset()}\n          >\n            Clear filters\n          </Button>\n        </div>\n      </div>\n    </Form>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/documents/documents-table.tsx",
    "content": "'use client';\n\nimport { link } from '@/components/cells/link';\nimport { type Document, listDocuments, type ListDocumentsTableFilters } from '@/api/documents';\nimport { deleteKnowledgeBaseDocument, rebuildKBDocumentIndex } from '@/api/knowledge-base';\nimport { actions } from '@/components/cells/actions';\nimport { datetime } from '@/components/cells/datetime';\nimport { mono } from '@/components/cells/mono';\nimport { DatasourceCell } from '@/components/cells/reference';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { DocumentPreviewDialog } from '@/components/document-viewer';\nimport { DocumentsTableFilters } from '@/components/documents/documents-table-filters';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { CellContext, ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { TrashIcon, UploadIcon, BlocksIcon, WrenchIcon, DownloadIcon, FileDownIcon } from 'lucide-react';\nimport { useMemo, useState } from 'react';\nimport { toast } from 'sonner';\nimport { Input } from '@/components/ui/input';\nimport { Button } from '@/components/ui/button';\nimport { parseHref } from '@/components/chat/utils';\n\nconst helper = createColumnHelper<Document>();\n\nconst truncateUrl = (url: string, maxLength: number = 30): string => {\n  if (!url || url.length <= maxLength) return url;\n  const start = url.substring(0, maxLength / 2);\n  const end = url.substring(url.length - maxLength / 2);\n  return `${start}...${end}`;\n};\n\nconst href = (cell: CellContext<Document, string>) => {\n  const url = cell.getValue();\n  if (/^https?:\\/\\//.test(url)) {\n    return <a className=\"underline\" href={url} target=\"_blank\">{url}</a>;\n  } else if (url.startsWith('uploads/')) {\n    return (\n      <a className=\"underline\" {...parseHref(cell.row.original)}>\n        <FileDownIcon className=\"inline-flex size-4 mr-1 stroke-1\" />\n        {truncateUrl(url)}\n      
</a>\n    );\n  } else {\n    return <span title={url}>{truncateUrl(url)}</span>;\n  }\n};\n\n\nconst getColumns = (kbId: number) => [\n  helper.accessor('id', { header: \"ID\", cell: mono }),\n  helper.display({\n    id: 'name', \n    header: 'NAME',\n    cell: ({ row }) =>\n      <DocumentPreviewDialog\n        title={row.original.name}\n        name={row.original.name}\n        mime={row.original.mime_type}\n        content={row.original.content}\n      />,\n  }),\n  helper.accessor('source_uri', {\n    header: \"SOURCE URI\",\n    cell: href,\n  }),\n  helper.accessor('data_source', { header: \"DATA SOURCE\", cell: ctx => <DatasourceCell {...ctx.getValue()} /> }),\n  helper.accessor('updated_at', { header: \"LAST UPDATED\", cell: datetime }),\n  helper.accessor('index_status', { header: \"INDEX STATUS\", cell: mono }),\n  helper.display({\n    id: 'op',\n    header: 'ACTIONS',\n    cell: actions(row => [\n      {\n        type: 'label',\n        title: 'Actions',\n      },\n      {\n        key: 'rebuild-index',\n        title: 'Rebuild Index',\n        icon: <WrenchIcon className=\"size-3\" />,\n        action: async (context) => {\n          try {\n            await rebuildKBDocumentIndex(kbId, row.id);\n            context.table.reload?.();\n            context.startTransition(() => {\n              context.router.refresh();\n            });\n            context.setDropdownOpen(false);\n            toast.success(`Successfully rebuilt index for document \"${row.name}\"`);\n          } catch (e) {\n            toast.error(`Failed to rebuild index for document \"${row.name}\"`, {\n              description: getErrorMessage(e),\n            });\n            return Promise.reject(e);\n          }\n        },\n      },\n      {\n        key: 'view-chunks',\n        title: 'View Chunks',\n        icon: <BlocksIcon className=\"size-3\" />,\n        action: async (context) => {\n          context.router.push(`/knowledge-bases/${kbId}/documents/${row.id}/chunks`);\n   
     },\n      },\n      {\n        type: 'separator',\n      },\n      {\n        key: 'delete-document',\n        title: 'Delete',\n        icon: <TrashIcon className=\"size-3\" />,\n        dangerous: {\n          dialogTitle: `Continue to delete document \"${row.name}\"?`,\n        },\n        action: async (context) => {\n          try {\n            await deleteKnowledgeBaseDocument(kbId, row.id);\n            context.table.reload?.();\n            context.startTransition(() => {\n              context.router.refresh();\n            });\n            context.setDropdownOpen(false);\n            toast.success(`Successfully deleted document \"${row.name}\"`);\n          } catch (e) {\n            toast.error(`Failed to delete document \"${row.name}\"`, {\n              description: getErrorMessage(e),\n            });\n            return Promise.reject(e);\n          }\n        },\n      },\n    ]),\n  }),\n] as ColumnDef<Document>[];\n\nexport function DocumentsTable ({ knowledgeBaseId }: { knowledgeBaseId: number }) {\n  const [filters, setFilters] = useState<ListDocumentsTableFilters>({});\n\n  const columns = useMemo(() => {\n    return [...getColumns(knowledgeBaseId)];\n  }, [knowledgeBaseId]);\n\n  return (\n    <DataTableRemote\n      toolbar={((table) => (\n          <div className=\"py-1\">\n            <DocumentsTableFilters\n              knowledgeBaseId={knowledgeBaseId}\n              table={table}\n              onFilterChange={setFilters}\n            />\n        </div>\n      ))}\n      columns={columns}\n      apiKey={knowledgeBaseId != null ? `api.datasource.${knowledgeBaseId}.documents` : 'api.documents.list'}\n      api={(params) => listDocuments({ ...params, ...filters, knowledge_base_id: knowledgeBaseId })}\n      apiDeps={[filters]}\n      idColumn=\"id\"\n    />\n  );\n}\n\n"
  },
  {
    "path": "frontend/app/src/components/embedding-models/CreateEmbeddingModelForm.tsx",
    "content": "'use client';\n\nimport { type CreateEmbeddingModel, createEmbeddingModel, type EmbeddingModel, testEmbeddingModel } from '@/api/embedding-models';\nimport { useEmbeddingModelProviders } from '@/components/embedding-models/hooks';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { ProviderDescription } from '@/components/provider-description';\nimport { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';\nimport { Form, formDomEventHandlers, FormSubmit } from '@/components/ui/form.beta';\nimport { useModelProvider } from '@/hooks/use-model-provider';\nimport { zodJsonText } from '@/lib/zod';\nimport { useForm } from '@tanstack/react-form';\nimport { useId, useState } from 'react';\nimport { toast } from 'sonner';\nimport { z } from 'zod';\n\nconst unsetForm = z.object({\n  name: z.string().min(1, 'Must not empty'),\n  provider: z.string().min(1, 'Must not empty'),\n  vector_dimension: z.coerce.number().int().positive(),\n  config: zodJsonText().optional(),\n});\n\nconst strCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: z.string().min(1, 'Must not empty'),\n});\n\nconst dictCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: zodJsonText(),\n});\n\nconst field = formFieldLayout<CreateEmbeddingModel>();\n\nexport function CreateEmbeddingModelForm ({ transitioning, onCreated }: { transitioning?: boolean, onCreated?: (embeddingModel: EmbeddingModel) => void }) {\n  const id = useId();\n  const { data: options, isLoading, error } = useEmbeddingModelProviders();\n  const 
[submissionError, setSubmissionError] = useState<unknown>();\n\n  const form = useForm<CreateEmbeddingModel | Omit<CreateEmbeddingModel, 'model' | 'credentials'>>({\n    validators: {\n      onSubmit: unsetForm,\n    },\n    onSubmit (props) {\n      const { value } = props;\n      const provider = options?.find(option => option.provider === value.provider);\n\n      const schema = provider\n        ? provider.credentials_type === 'str'\n          ? strCredentialForm\n          : provider.credentials_type === 'dict'\n            ? dictCredentialForm\n            : unsetForm\n        : unsetForm;\n\n      return onSubmitHelper(schema, async (values) => {\n        const { error, success } = await testEmbeddingModel(values as CreateEmbeddingModel);\n        if (!success) {\n          throw new Error(error || 'Test Embedding Model failed');\n        }\n        const embeddingModel = await createEmbeddingModel(values as CreateEmbeddingModel);\n        toast.success(`Embedding Model ${embeddingModel.name} successfully created.`);\n        onCreated?.(embeddingModel);\n      }, setSubmissionError)(props);\n    },\n    defaultValues: {\n      name: '',\n      provider: '',\n      vector_dimension: 1536,\n      config: '{}',\n    },\n  });\n\n  const provider = useModelProvider(form, options, 'default_embedding_model');\n\n  return (\n    <>\n      <Form form={form} disabled={transitioning} submissionError={submissionError}>\n        <form id={id} className=\"space-y-4 max-w-screen-sm\" {...formDomEventHandlers(form, transitioning)}>\n          <field.Basic name=\"name\" label=\"Name\">\n            <FormInput />\n          </field.Basic>\n          <field.Basic name=\"provider\" label=\"Provider\" description={provider && <ProviderDescription provider={provider} />}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n          {provider && (\n            <>\n              <field.Basic name=\"model\" 
label=\"Model\" description={provider.embedding_model_description}>\n                <FormInput />\n              </field.Basic>\n              <field.Basic name=\"credentials\" label={provider.credentials_display_name} description={provider.credentials_description}>\n                {provider.credentials_type === 'str'\n                  ? <FormInput placeholder={provider.default_credentials} />\n                  : <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                }\n              </field.Basic>\n              <field.Basic name=\"vector_dimension\" label=\"Vector Dimensions\">\n                <FormInput type=\"number\" min={1} />\n              </field.Basic>\n              <Accordion type=\"multiple\">\n                <AccordionItem value=\"advanced-settings\">\n                  <AccordionTrigger>\n                    Advanced Settings\n                  </AccordionTrigger>\n                  <AccordionContent className=\"px-4\">\n                    <field.Basic name=\"config\" label=\"Config\" description={provider.config_description}>\n                      <CodeInput language=\"json\" />\n                    </field.Basic>\n                  </AccordionContent>\n                </AccordionItem>\n              </Accordion>\n            </>\n          )}\n          <FormRootError title=\"Failed to create Embedding Model\" />\n          <FormSubmit disabled={!options} transitioning={transitioning} form={id}>\n            Create Embedding Model\n          </FormSubmit>\n        </form>\n      </Form>\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/embedding-models/EmbeddingModelInfo.tsx",
    "content": "'use client';\n\nimport { useEmbeddingModel } from '@/components/embedding-models/hooks';\nimport { ModelComponentInfo } from '@/components/model-component-info';\n\nexport function EmbeddingModelInfo ({ className, id }: { className?: string, id: number | undefined | null }) {\n  const { embeddingModel, isLoading } = useEmbeddingModel(id);\n\n  return <ModelComponentInfo\n    className={className}\n    model={embeddingModel}\n    url={embeddingModel => `/embedding-models/${embeddingModel.id}`}\n    isLoading={isLoading}\n    defaultName=\"Default Embedding Model\"\n  />;\n}\n"
  },
  {
    "path": "frontend/app/src/components/embedding-models/EmbeddingModelsTable.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { type EmbeddingModel, listEmbeddingModels } from '@/api/embedding-models';\nimport { actions } from '@/components/cells/actions';\nimport { mono } from '@/components/cells/mono';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { Badge } from '@/components/ui/badge';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport Link from 'next/link';\nimport { toast } from 'sonner';\n\nexport function EmbeddingModelsTable () {\n  return (\n    <DataTableRemote\n      columns={columns}\n      apiKey=\"api.embedding-models.list\"\n      api={listEmbeddingModels}\n      idColumn=\"id\"\n    />\n  );\n}\n\nconst helper = createColumnHelper<EmbeddingModel>();\nconst columns: ColumnDef<EmbeddingModel, any>[] = [\n  helper.accessor('id', {\n    header: 'ID',\n    cell: ({ row }) => row.original.id\n  }),\n  helper.accessor('name', {\n    header: 'NAME',\n    cell: ({ row }) => {\n      const { id, name, is_default } = row.original;\n      return (\n        <Link className=\"flex gap-1 items-center underline\" href={`/embedding-models/${id}`}>\n          {is_default && <Badge>default</Badge>}\n          {name}\n        </Link>\n      );\n    },\n  }),\n  helper.display({\n    header: 'PROVIDER / MODEL',\n    cell: ({ row }) => {\n      const { model, provider } = row.original;\n      return (\n        <>\n          <strong>{provider}</strong>/<span>{model}</span>\n        </>\n      );\n    },\n  }),\n  helper.accessor('vector_dimension', { \n    header: 'VECTOR DIMENSION',\n    cell: mono \n  }),\n  helper.display({\n    id: 'Operations',\n    header: 'ACTIONS',\n    cell: actions(row => ([\n      {\n        key: 'set-default',\n        title: 'Set Default',\n        disabled: row.is_default,\n        action: async (context) => {\n          try {\n     
       await setDefault('embedding-models', row.id);\n            context.table.reload?.();\n            context.startTransition(() => {\n              context.router.refresh();\n            });\n            context.setDropdownOpen(false);\n            toast.success(`Successfully set default Embedding Model to ${row.name}.`);\n          } catch (e) {\n            toast.error(`Failed to set default Embedding Model to ${row.name}.`, {\n              description: getErrorMessage(e),\n            });\n            throw e;\n          }\n        },\n      },\n    ])),\n  }),\n];\n"
  },
  {
    "path": "frontend/app/src/components/embedding-models/UpdateEmbeddingModelForm.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { type EmbeddingModel, updateEmbeddingModel, type UpdateEmbeddingModel } from '@/api/embedding-models';\nimport { useEmbeddingModelProviders } from '@/components/embedding-models/hooks';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput, FormSwitch } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { fieldAccessor, GeneralSettingsField, type GeneralSettingsFieldAccessor, GeneralSettingsForm } from '@/components/settings-form';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { zodJsonText } from '@/lib/zod';\nimport { format } from 'date-fns';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nexport function UpdateEmbeddingModelForm ({ embeddingModel }: { embeddingModel: EmbeddingModel }) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n  const { data: options, isLoading, error } = useEmbeddingModelProviders();\n\n  const provider = options?.find(option => option.provider === embeddingModel.provider);\n\n  return (\n    <div className=\"max-w-screen-sm space-y-4\">\n      <GeneralSettingsForm<UpdateEmbeddingModel>\n        data={embeddingModel}\n        readonly={false}\n        loading={transitioning}\n        onUpdate={async (data, path) => {\n          if (path[0] === 'is_default') {\n            await setDefault('embedding-models', embeddingModel.id);\n          } else {\n            const key = path[0] as keyof UpdateEmbeddingModel;\n            await updateEmbeddingModel(embeddingModel.id, {\n              [key]: data[key],\n            });\n          }\n          startTransition(() => {\n            router.refresh();\n          });\n        }}\n      >\n        <GeneralSettingsField 
accessor={idAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"ID\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={nameAccessor} schema={nameSchema}>\n          <field.Basic label=\"Name\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={providerAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Provider\" name=\"value\" description={provider?.provider_description}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={modelAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Model\" name=\"value\" description={provider?.embedding_model_description}>\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        {provider && (\n          provider.credentials_type === 'str'\n            ? 
(\n              <GeneralSettingsField accessor={stringCredentialAccessor} schema={stringCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <FormInput placeholder={provider.default_credentials} />\n                </field.Basic>\n              </GeneralSettingsField>\n            ) : (\n              <GeneralSettingsField accessor={dictCredentialAccessor} schema={dictCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                </field.Basic>\n              </GeneralSettingsField>\n            )\n        )}\n        <GeneralSettingsField accessor={vectorDimensionAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Vector Dimensions\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={configAccessor} schema={configSchema}>\n          <field.Basic label=\"Config\" name=\"value\" description={provider?.config_description}>\n            <CodeInput language=\"json\" />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={isDefaultAccessor} schema={anySchema}>\n          <field.Contained label=\"Is Default\" name=\"value\">\n            <FormSwitch />\n          </field.Contained>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={createdAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Created At\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={updatedAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Updated At\" name=\"value\">\n            
<FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n      </GeneralSettingsForm>\n    </div>\n  );\n}\n\nconst field = formFieldLayout<{ value: any | any[] }>();\n\nconst anySchema = z.any();\n\nconst getDatetimeAccessor = (key: KeyOfType<EmbeddingModel, Date | undefined | null>): GeneralSettingsFieldAccessor<EmbeddingModel, string> => {\n  return {\n    path: [key],\n    get (data) {\n      return format(data[key], 'yyyy-MM-dd HH:mm:ss');\n    },\n    set () {\n      throw new Error(`update ${key} is not supported`);\n    },\n  };\n};\n\nconst nameSchema = z.string();\nconst configSchema = zodJsonText();\n\nconst nameAccessor = fieldAccessor<UpdateEmbeddingModel, 'name'>('name');\nconst idAccessor = fieldAccessor<EmbeddingModel, 'id'>('id');\nconst providerAccessor = fieldAccessor<EmbeddingModel, 'provider'>('provider');\nconst modelAccessor = fieldAccessor<EmbeddingModel, 'model'>('model');\nconst vectorDimensionAccessor = fieldAccessor<EmbeddingModel, 'vector_dimension'>('vector_dimension');\nconst configAccessor: GeneralSettingsFieldAccessor<UpdateEmbeddingModel, string> = {\n  path: ['config'],\n  get (data) {\n    return JSON.stringify(data.config, undefined, 2);\n  },\n  set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      config: value,\n    };\n  },\n};\nconst isDefaultAccessor = fieldAccessor<EmbeddingModel, 'is_default'>('is_default');\nconst createdAtAccessor = getDatetimeAccessor('created_at');\nconst updatedAtAccessor = getDatetimeAccessor('updated_at');\n\nconst stringCredentialSchema = z.string().optional();\nconst dictCredentialSchema = zodJsonText();\n\nconst stringCredentialAccessor = fieldAccessor<UpdateEmbeddingModel, 'credentials'>('credentials', '');\nconst dictCredentialAccessor: GeneralSettingsFieldAccessor<UpdateEmbeddingModel, string> = {\n  path: ['credentials'],\n  get (data) {\n    return JSON.stringify(data.credentials, undefined, 2);\n  },\n  
set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      credentials: value,\n    };\n  },\n};"
  },
  {
    "path": "frontend/app/src/components/embedding-models/hooks.tsx",
    "content": "import { listEmbeddingModelOptions, listEmbeddingModels } from '@/api/embedding-models';\nimport { listAllHelper } from '@/lib/request';\nimport useSWR from 'swr';\n\nexport function useAllEmbeddingModels (flag = true) {\n  return useSWR(flag && 'api.embedding-models.list-all', () => listAllHelper(listEmbeddingModels, 'id'));\n}\n\nexport function useEmbeddingModel (id: number | null | undefined) {\n  const { data, mutate, ...rest } = useAllEmbeddingModels(id != null);\n\n  return {\n    embeddingModel: data?.find(embeddingModel => embeddingModel.id === id),\n    ...rest,\n  };\n}\n\nexport function useEmbeddingModelProviders () {\n  return useSWR('api.embedding-models.list-options', listEmbeddingModelOptions);\n}\n"
  },
  {
    "path": "frontend/app/src/components/error-card.tsx",
    "content": "import { Card, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';\nimport { LockKeyholeIcon } from 'lucide-react';\nimport type { ReactNode } from 'react';\n\nexport interface ErrorCardProps {\n  title: ReactNode;\n  message?: ReactNode;\n  children?: ReactNode;\n}\n\nexport function ErrorCard ({\n  title, message, children,\n}: ErrorCardProps) {\n  return (\n    <Card className=\"shadow-2xl mx-8 max-w-full w-[480px]\">\n      <CardHeader>\n        <CardTitle>\n          <LockKeyholeIcon className=\"mr-2 inline-block size-6 align-bottom text-muted-foreground\" />\n          {title}\n        </CardTitle>\n        <CardDescription>\n          {message}\n        </CardDescription>\n      </CardHeader>\n      <CardFooter>\n        {children}\n      </CardFooter>\n    </Card>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/evaluations/cells.tsx",
    "content": "import type { EvaluationTaskItem } from '@/api/evaluations';\nimport { AutoErrorMessagePopper } from '@/components/cells/error-message';\nimport { DocumentPreviewDialog } from '@/components/document-viewer';\nimport type { CellContext } from '@tanstack/react-table';\nimport { CircleCheckIcon, CircleDashedIcon, CircleXIcon, Loader2Icon } from 'lucide-react';\nimport { useMemo } from 'react';\nimport wcwidth from 'wcwidth';\n\n// eslint-disable-next-line react/display-name\nexport const documentCell = (title: string, trimLength = 50, mime = 'text/markdown') => (context: CellContext<any, string | undefined | null>) => {\n  const content = context.getValue();\n\n  const splitIndex = useMemo(() => {\n    if (!content) {\n      return -1;\n    }\n\n    let n = 0;\n\n    for (let i = 0; i < content.length; i++) {\n      if (n < trimLength) {\n        n += wcwidth(content[i]);\n      } else {\n        return i;\n      }\n    }\n\n    return -1;\n  }, [content, trimLength]);\n\n  if (!content) {\n    return '--';\n  }\n\n  if (splitIndex < 0) {\n    return content;\n  }\n\n  return (\n    <DocumentPreviewDialog\n      title={title}\n      name={content.slice(0, splitIndex) + '...'}\n      mime={mime}\n      content={content}\n    />\n  );\n};\n\nexport const textChunksArrayCell = (context: CellContext<any, string[] | undefined | null>) => {\n  return (context.getValue()?.length ?? 
'-') + ' Items';\n};\n\nexport const evaluationTaskStatusCell = (context: CellContext<EvaluationTaskItem, EvaluationTaskItem['status']>) => {\n  return <StatusCell row={context.row.original} />;\n};\n\nfunction StatusCell ({ row }: { row: EvaluationTaskItem }) {\n  const { status, error_msg } = row;\n  return (\n    <span className=\"inline-flex gap-1\">\n      {status === 'not_start' && <CircleDashedIcon className=\"text-muted-foreground flex-shrink-0 size-4\" />}\n      {status === 'cancel' && <CircleXIcon className=\"text-muted-foreground flex-shrink-0 size-4\" />}\n      {status === 'evaluating' && <Loader2Icon className=\"text-info flex-shrink-0 size-4 animate-spin repeat-infinite\" />}\n      {status === 'done' && <CircleCheckIcon className=\"text-success flex-shrink-0 size-4\" />}\n      {status === 'error' && <CircleXIcon className=\"text-destructive flex-shrink-0 size-4\" />}\n      <span className=\"text-accent-foreground\">\n        {status === 'not_start' ? 'Not started' : status === 'evaluating' ? 'Evaluating' : status === 'done' ? 'Done' : status === 'cancel' ? 'Cancelled' : 'Error:'}\n      </span>\n      {status === 'error' && <AutoErrorMessagePopper trimLength={28}>{error_msg}</AutoErrorMessagePopper>}\n    </span>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-dataset-form.stories.tsx",
    "content": "import { CreateEvaluationDatasetForm } from '@/components/evaluations/create-evaluation-dataset-form';\nimport type { Meta, StoryObj } from '@storybook/react';\n\nconst meta = {\n  title: 'Components/Evaluations/CreateEvaluationDatasetForm',\n  component: CreateEvaluationDatasetForm,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ width: 640 }}>\n        <Story />\n      </div>\n    ),\n  ],\n} satisfies Meta<typeof CreateEvaluationDatasetForm>;\n\nexport const Default = {} satisfies StoryObj<typeof meta>;\n\nexport default meta;\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-dataset-form.tsx",
    "content": "import type { ComponentProps } from 'react';\nimport { FileInput } from '@/components/form/widgets/FileInput';\nimport { FormInput } from '@/components/form/control-widget';\nimport Link from 'next/link';\nimport { createEvaluationDataset } from '@/api/evaluations';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { uploadFiles } from '@/api/datasources';\nimport { withCreateEntityForm } from '@/components/form/create-entity-form';\nimport { z } from 'zod';\nimport { zodFile } from '@/lib/zod';\n\nconst schema = z.object({\n  name: z.string().min(1),\n  upload_file: zodFile().optional(),\n});\n\nconst field = formFieldLayout<typeof schema>();\n\nconst FormImpl = withCreateEntityForm(schema, async ({ upload_file, ...params }) => {\n  if (upload_file != null) {\n    const [file] = await uploadFiles([upload_file]);\n    return await createEvaluationDataset({\n      ...params,\n      upload_id: file.id,\n    });\n  } else {\n    return await createEvaluationDataset({\n      ...params,\n    });\n  }\n});\n\nexport function CreateEvaluationDatasetForm ({ transitioning, onCreated }: Omit<ComponentProps<typeof FormImpl>, 'defaultValues' | 'children'>) {\n  return (\n    <FormImpl\n      defaultValues={{\n        name: '',\n      }}\n      transitioning={transitioning}\n      onCreated={onCreated}\n    >\n      <field.Basic name=\"name\" label=\"Name\" required>\n        <FormInput />\n      </field.Basic>\n      <field.Basic name=\"upload_file\" label=\"Upload File\" description={<>Evaluation dataset CSV file. See the <Link className='underline' href='https://autoflow.tidb.ai/evaluation#prerequisites'>documentation</Link> for the format.</>}>\n        <FileInput accept={['.csv']} />\n      </field.Basic>\n    </FormImpl>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-dataset-item-form.stories.tsx",
    "content": "import { CreateEvaluationDatasetItemForm } from '@/components/evaluations/create-evaluation-dataset-item-form';\nimport type { Meta, StoryObj } from '@storybook/react';\n\nconst meta = {\n  title: 'Components/Evaluations/CreateEvaluationDatasetItemForm',\n  component: CreateEvaluationDatasetItemForm,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ width: 640 }}>\n        <Story />\n      </div>\n    ),\n  ],\n} satisfies Meta<typeof CreateEvaluationDatasetItemForm>;\n\nexport const Default = {\n  args: {\n    evaluationDatasetId: 8848,\n  },\n} satisfies StoryObj<typeof meta>;\n\nexport default meta;\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-dataset-item-form.tsx",
    "content": "import { createEvaluationDatasetItem, type EvaluationDatasetItem } from '@/api/evaluations';\nimport { FormTextarea } from '@/components/form/control-widget';\nimport { withCreateEntityForm } from '@/components/form/create-entity-form';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { zodJson } from '@/lib/zod';\nimport { useMemo } from 'react';\nimport { z } from 'zod';\n\nconst schema = z.object({\n  query: z.string().min(1),\n  reference: z.string().min(1),\n  retrieved_contexts: z.string().min(1).array(),\n  extra: z.string()\n    .pipe(z.custom<string>(s => {\n      if (!s.trim()) return true;\n      try {\n        JSON.parse(s);\n        return true;\n      } catch {\n        return false;\n      }\n    }, 'Invalid JSON'))\n    .transform(s => {\n      if (s.trim()) {\n        return JSON.parse(s);\n      } else {\n        return undefined;\n      }\n    })\n    .pipe(zodJson()),\n});\n\nconst field = formFieldLayout<typeof schema>();\n\nexport function CreateEvaluationDatasetItemForm ({ evaluationDatasetId, transitioning, onCreated }: { evaluationDatasetId: number, transitioning?: boolean, onCreated?: (item: EvaluationDatasetItem) => void }) {\n  const FormImpl = useMemo(() => withCreateEntityForm(schema, params => createEvaluationDatasetItem(evaluationDatasetId, params)), [evaluationDatasetId]);\n\n  return (\n    <FormImpl\n      defaultValues={{\n        query: '',\n        reference: '',\n        retrieved_contexts: [],\n        extra: '{}',\n      }}\n      transitioning={transitioning}\n      onCreated={onCreated}\n    >\n      <field.Basic name=\"query\" label=\"Query\" required>\n        <FormTextarea />\n      </field.Basic>\n      <field.Basic name=\"reference\" label=\"Reference\" required>\n        <FormTextarea />\n      </field.Basic>\n      <field.PrimitiveArray name=\"retrieved_contexts\" label=\"Retrieved Contexts\" newItemValue={() 
=> ''} required>\n        <FormTextarea />\n      </field.PrimitiveArray>\n      <field.Basic name=\"extra\" label=\"Extra\">\n        <CodeInput language=\"json\" />\n      </field.Basic>\n    </FormImpl>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-task-form.stories.tsx",
    "content": "import { CreateEvaluationTaskForm } from '@/components/evaluations/create-evaluation-task-form';\nimport type { Meta, StoryObj } from '@storybook/react';\n\nconst meta = {\n  title: 'Components/Evaluations/CreateEvaluationTaskForm',\n  component: CreateEvaluationTaskForm,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ width: 640 }}>\n        <Story />\n      </div>\n    ),\n  ],\n} satisfies Meta<typeof CreateEvaluationTaskForm>;\n\nexport const Default = {} satisfies StoryObj<typeof meta>;\n\nexport default meta;\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/create-evaluation-task-form.tsx",
    "content": "import { createEvaluationTask, type CreateEvaluationTaskParams } from '@/api/evaluations';\nimport { ChatEngineSelect, EvaluationDatasetSelect } from '@/components/form/biz';\nimport { FormInput } from '@/components/form/control-widget';\nimport { withCreateEntityForm as withCreateEntityForm } from '@/components/form/create-entity-form';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport type { ComponentProps } from 'react';\nimport { z, type ZodType } from 'zod';\n\nconst schema = z.object({\n  name: z.string().min(1),\n  evaluation_dataset_id: z.number().int(),\n  chat_engine: z.string().optional(),\n  run_size: z.coerce.number().int().min(1).optional(),\n}) satisfies ZodType<CreateEvaluationTaskParams, any, any>;\n\nconst FormImpl = withCreateEntityForm(schema, createEvaluationTask);\nconst field = formFieldLayout<typeof schema>();\n\nexport function CreateEvaluationTaskForm ({ transitioning, onCreated }: Omit<ComponentProps<typeof FormImpl>, 'defaultValues' | 'children'>) {\n  return (\n    <FormImpl\n      transitioning={transitioning}\n      onCreated={onCreated}\n    >\n      <field.Basic name=\"name\" label=\"Name\" required defaultValue=\"\">\n        <FormInput />\n      </field.Basic>\n      <field.Basic name=\"evaluation_dataset_id\" label=\"Evaluation Dataset\" required>\n        <EvaluationDatasetSelect />\n      </field.Basic>\n      <field.Basic name=\"chat_engine\" label=\"Chat Engine\">\n        <ChatEngineSelect />\n      </field.Basic>\n      <field.Basic name=\"run_size\" label=\"Run Size\" description=\"Number of evaluation dataset items to run. Defaults to running the whole dataset.\">\n        <FormInput type=\"number\" min={1} step={1} />\n      </field.Basic>\n    </FormImpl>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-dataset-info.tsx",
    "content": "import { type EvaluationDataset, updateEvaluationDataset } from '@/api/evaluations';\nimport { mutateEvaluationDatasets, useEvaluationDataset } from '@/components/evaluations/hooks';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { createAccessorHelper, GeneralSettingsField as GeneralSettingsField, GeneralSettingsForm } from '@/components/settings-form';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { useRouter } from 'next/navigation';\nimport * as React from 'react';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nexport function EvaluationDatasetInfo ({ evaluationDatasetId }: { evaluationDatasetId: number }) {\n  const { evaluationDataset } = useEvaluationDataset(evaluationDatasetId);\n\n  if (evaluationDataset) {\n    return <EvaluationDatasetInfoDisplay evaluationDataset={evaluationDataset} />;\n  } else {\n    return <EvaluationDatasetInfoSkeleton />;\n  }\n}\n\nconst field = formFieldLayout<Record<'value', any>>();\n\nexport function EvaluationDatasetInfoDisplay ({ evaluationDataset }: { evaluationDataset: EvaluationDataset }) {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <div className=\"space-y-4 max-w-screen-sm\">\n      <GeneralSettingsForm\n        data={evaluationDataset}\n        readonly={transitioning}\n        loading={transitioning}\n        onUpdate={async (item) => {\n          await updateEvaluationDataset(item.id, { name: item.name });\n          startTransition(() => {\n            router.refresh();\n            void mutateEvaluationDatasets();\n          });\n        }}\n      >\n        <GeneralSettingsField\n          accessor={id}\n          schema={whateverSchema}\n          readonly\n        >\n          <field.Basic name=\"value\" label=\"ID\">\n            <FormInput />\n          </field.Basic>\n        
</GeneralSettingsField>\n        <GeneralSettingsField\n          accessor={name}\n          schema={nameSchema}\n        >\n          <field.Basic name=\"value\" label=\"Name\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField\n          accessor={createdAt}\n          schema={whateverSchema}\n          readonly\n        >\n          <field.Basic name=\"value\" label=\"Created At\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField\n          accessor={updatedAt}\n          schema={whateverSchema}\n          readonly\n        >\n          <field.Basic name=\"value\" label=\"Updated At\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField\n          accessor={userId}\n          schema={whateverSchema}\n          readonly\n        >\n          <field.Basic name=\"value\" label=\"User ID\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n      </GeneralSettingsForm>\n    </div>\n  );\n}\n\nexport function EvaluationDatasetInfoSkeleton ({}: {}) {\n  return (\n    <div className=\"space-y-4\">\n      <div className=\"space-y-2\">\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[14em] h-[1em] rounded-sm\" />\n        </div>\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[14em] h-[1em] rounded-sm\" />\n        </div>\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[8em] h-[1em] rounded-sm\" />\n        </div>\n      </div>\n    </div>\n  );\n}\n\nconst helper = createAccessorHelper<EvaluationDataset>();\n\nconst id = helper.field('id');\nconst name = helper.field('name');\nconst userId = helper.field('user_id');\nconst createdAt = helper.dateField('created_at');\nconst updatedAt = 
helper.dateField('updated_at');\n\nconst whateverSchema = z.any();\nconst nameSchema = z.string().min(1);"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-dataset-items-table.tsx",
    "content": "'use client';\n\nimport { deleteEvaluationDatasetItem, type EvaluationDatasetItem, listEvaluationDatasetItems } from '@/api/evaluations';\nimport { actions } from '@/components/cells/actions';\nimport { datetime } from '@/components/cells/datetime';\nimport { link } from '@/components/cells/link';\nimport { metadataCell } from '@/components/cells/metadata';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { documentCell, textChunksArrayCell } from '@/components/evaluations/cells';\nimport { type KeywordFilter, KeywordFilterToolbar } from '@/components/evaluations/keyword-filter-toolbar';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { useState } from 'react';\n\nconst helper = createColumnHelper<EvaluationDatasetItem>();\n\nconst columns = [\n  helper.accessor('id', { header: 'ID', cell: link({ url: row => `/evaluation/datasets/${row.evaluation_dataset_id}/items/${row.id}` }) }),\n  helper.accessor('query', { header: 'QUERY', cell: documentCell('Query') }),\n  helper.accessor('reference', { header: 'REFERENCE', cell: documentCell('Reference') }),\n  helper.accessor('retrieved_contexts', { header: 'RETRIEVED CONTEXTS', cell: textChunksArrayCell }),\n  helper.accessor('extra', { header: 'EXTRA', cell: metadataCell }),\n  helper.accessor('created_at', { header: 'CREATED AT', cell: datetime }),\n  helper.accessor('updated_at', { header: 'UPDATED AT', cell: datetime }),\n  helper.display({\n    id: 'op',\n    header: 'ACTIONS',\n    cell: actions(row => ([\n      {\n        key: 'update',\n        title: 'Update',\n        action (context) {\n          context.startTransition(() => {\n            context.router.push(`/evaluation/datasets/${row.evaluation_dataset_id}/items/${row.id}`);\n          });\n        },\n      },\n      {\n        key: 'delete',\n        dangerous: {},\n        title: 'Delete',\n        async action (context) {\n          
await deleteEvaluationDatasetItem(row.evaluation_dataset_id, row.id);\n          context.startTransition(() => {\n            context.router.refresh();\n          });\n          context.setDropdownOpen(false);\n          context.table.reload?.();\n        },\n      },\n    ])),\n  }),\n] as ColumnDef<EvaluationDatasetItem>[];\n\nexport function EvaluationDatasetItemsTable ({ evaluationDatasetId }: { evaluationDatasetId: number }) {\n  const [filter, setFilter] = useState<KeywordFilter>({ keyword: '' })\n  return (\n    <DataTableRemote\n      columns={columns}\n      toolbar={() => (\n        <KeywordFilterToolbar onFilterChange={setFilter} />\n      )}\n      apiKey={`api.evaluation.datasets.${evaluationDatasetId}.all-items`}\n      api={(page) => listEvaluationDatasetItems(evaluationDatasetId, { ...page, ...filter })}\n      apiDeps={[filter.keyword]}\n      idColumn=\"id\"\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-datasets-table.tsx",
    "content": "'use client';\n\nimport { deleteEvaluationDataset, type EvaluationDataset, listEvaluationDatasets } from '@/api/evaluations';\nimport { actions } from '@/components/cells/actions';\nimport { datetime } from '@/components/cells/datetime';\nimport { link } from '@/components/cells/link';\nimport { mono } from '@/components/cells/mono';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { mutateEvaluationDatasets } from '@/components/evaluations/hooks';\nimport { type KeywordFilter, KeywordFilterToolbar } from '@/components/evaluations/keyword-filter-toolbar';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { useState } from 'react';\n\nconst helper = createColumnHelper<EvaluationDataset>();\n\nconst columns = [\n  helper.accessor('id', { header: 'ID', cell: mono }),\n  helper.accessor('name', { header: 'NAME', cell: link({ text: row => row.name, url: row => `/evaluation/datasets/${row.id}` }) }),\n  helper.accessor('user_id', { header: 'USER ID' }),\n  helper.accessor('created_at', { header: 'CREATED AT', cell: datetime }),\n  helper.accessor('updated_at', { header: 'UPDATED AT', cell: datetime }),\n  helper.display({\n    id: 'op',\n    header: 'OPERATIONS',\n    cell: actions(row => [\n      {\n        key: 'update',\n        title: 'Update',\n        action: context => {\n          context.startTransition(() => {\n            context.router.push(`/evaluation/datasets/${row.id}`);\n          });\n        },\n      },\n      {\n        key: 'delete',\n        title: 'Delete',\n        dangerous: {},\n        action: async context => {\n          await deleteEvaluationDataset(row.id);\n          context.startTransition(() => {\n            context.router.refresh();\n            void mutateEvaluationDatasets();\n          });\n          context.setDropdownOpen(false);\n        },\n      },\n    ]),\n  }),\n] as ColumnDef<EvaluationDataset>[];\n\nexport 
function EvaluationDatasetsTable () {\n  const [filter, setFilter] = useState<KeywordFilter>({});\n  return (\n    <DataTableRemote\n      toolbar={() => (\n        <KeywordFilterToolbar onFilterChange={setFilter} />\n      )}\n      columns={columns}\n      apiKey=\"api.evaluation.datasets.list\"\n      api={page => listEvaluationDatasets({ ...page, ...filter })}\n      apiDeps={[filter.keyword]}\n      idColumn=\"id\"\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-task-info.stories.tsx",
    "content": "import type { EvaluationTask, EvaluationTaskWithSummary } from '@/api/evaluations';\nimport { EvaluationTaskInfo, EvaluationTaskInfoDisplay, EvaluationTaskInfoSkeleton } from '@/components/evaluations/evaluation-task-info';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport type { ComponentType } from 'react';\n\nconst summary = {\n  id: 10086,\n  dataset_id: 8848,\n  name: 'Demo Task',\n  created_at: new Date('2024-01-01 18:45:08'),\n  updated_at: new Date('2024-01-01 18:45:08'),\n  user_id: 'some-user-id',\n  summary: {\n    errored: 2,\n    not_start: 14,\n    succeed: 23,\n    progressing: 3,\n    avg_factual_correctness: 0.83,\n    avg_semantic_similarity: 0.76,\n    min_factual_correctness: 0.45,\n    min_semantic_similarity: 0.56,\n    max_factual_correctness: 0.85,\n    max_semantic_similarity: 0.95,\n    std_factual_correctness: 0.74,\n    std_semantic_similarity: 0.83,\n  },\n} satisfies EvaluationTaskWithSummary;\n\nconst meta = {\n  title: 'Components/Evaluations/EvaluationTaskSummary',\n  subcomponents: {\n    EvaluationTaskInfoSkeleton: EvaluationTaskInfoSkeleton,\n    EvaluationTaskInfoDisplay: EvaluationTaskInfoDisplay as ComponentType<unknown>,\n  },\n  args: {},\n} satisfies Meta<typeof EvaluationTaskInfo>;\n\nexport const Display = {\n  render () {\n    return (\n      <EvaluationTaskInfoDisplay task={summary} />\n    );\n  },\n} satisfies StoryObj<typeof meta>;\n\nexport const Skeleton = {\n  render () {\n    return (\n      <EvaluationTaskInfoSkeleton />\n    );\n  },\n} satisfies StoryObj<typeof meta>;\n\nexport default meta;"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-task-info.tsx",
    "content": "'use client';\n\nimport { cancelEvaluationTask, type EvaluationTaskSummary as EvaluationTaskSummaryType, type EvaluationTaskWithSummary, getEvaluationTaskWithSummary } from '@/api/evaluations';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { DateFormat } from '@/components/date-format';\nimport { mutateEvaluationTasks } from '@/components/evaluations/hooks';\n\nimport { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';\nimport { ChartConfig, ChartContainer, ChartTooltip, ChartTooltipContent } from '@/components/ui/chart';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport Link from 'next/link';\nimport * as React from 'react';\nimport { useMemo } from 'react';\nimport { Bar, BarChart, CartesianGrid, Label, Pie, PieChart, XAxis } from 'recharts';\nimport useSWR from 'swr';\n\nexport function EvaluationTaskInfo ({ evaluationTaskId }: { evaluationTaskId: number }) {\n  const { data } = useSWR(`api.evaluation.tasks.${evaluationTaskId}`, () => getEvaluationTaskWithSummary(evaluationTaskId));\n\n  if (data) {\n    return <EvaluationTaskInfoDisplay task={data} />;\n  } else {\n    return <EvaluationTaskInfoSkeleton />;\n  }\n}\n\nexport function EvaluationTaskInfoSkeleton () {\n  return (\n    <div className=\"space-y-4\">\n      <div className=\"py-[0.25em] text-xl\">\n        <Skeleton className=\"block w-[7em] h-[1em] rounded-sm\" />\n      </div>\n      <div className=\"space-y-2 pb-[2px]\">\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[7em] h-[1em] rounded-sm\" />\n        </div>\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[14em] h-[1em] rounded-sm\" />\n        </div>\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton className=\"block w-[14em] h-[1em] rounded-sm\" />\n        </div>\n        <div className=\"py-[0.125em] text-xs\">\n          <Skeleton 
className=\"block w-[8em] h-[1em] rounded-sm\" />\n        </div>\n      </div>\n      <div className=\"grid gap-4 md:grid-cols-2 lg:grid-cols-3\">\n        <div className=\"col-span-1 lg:col-span-1\">\n          <StatusPieChartSkeleton />\n        </div>\n        <div className=\"col-span-1 lg:col-span-1\">\n          <RagasMetricsChartSkeleton />\n        </div>\n      </div>\n    </div>\n  );\n}\n\nexport function EvaluationTaskInfoDisplay ({ task: { summary, ...task } }: { task: EvaluationTaskWithSummary }) {\n  const canCancel = summary.not_start > 0;\n\n  return (\n    <div className=\"space-y-4\">\n      <h2 className=\"text-xl font-semibold\">{task.name}</h2>\n      <div className=\"text-muted-foreground text-xs space-y-2\">\n        <div>Dataset: <Link className=\"text-foreground underline\" href={`/evaluation/datasets/${task.dataset_id}`}>{task.dataset_id}</Link></div>\n        <div>Created at: <DateFormat date={task.created_at} /></div>\n        <div>Updated at: <DateFormat date={task.updated_at} /></div>\n        <div>User ID: {task.user_id}</div>\n        {canCancel && <div>\n          <DangerousActionButton\n            size=\"sm\"\n            variant=\"destructive\"\n            action={async () => {\n              await cancelEvaluationTask(task.id);\n              void mutateEvaluationTasks();\n            }}\n          >\n            Cancel Task\n          </DangerousActionButton>\n        </div>}\n      </div>\n      <div className=\"grid gap-4 md:grid-cols-2 lg:grid-cols-3\">\n        <div className=\"col-span-1 lg:col-span-1\">\n          <StatusPieChart summary={summary} />\n        </div>\n        <div className=\"col-span-1 lg:col-span-1\">\n          <RagasMetricsChart summary={summary} />\n        </div>\n      </div>\n    </div>\n  );\n}\n\nconst pieChartConfig = {\n  visitors: {\n    label: 'Visitors',\n  },\n  not_start: {\n    label: 'Not Start',\n    color: 'hsl(var(--muted))',\n  },\n  progressing: {\n    label: 'Processing',\n    
color: 'hsl(var(--info))',\n  },\n  succeed: {\n    label: 'Succeed',\n    color: 'hsl(var(--success))',\n  },\n  errored: {\n    label: 'Errored',\n    color: 'hsl(var(--destructive))',\n  },\n} satisfies ChartConfig;\n\nfunction StatusPieChart ({ summary }: { summary: Pick<EvaluationTaskSummaryType, 'not_start' | 'errored' | 'progressing' | 'succeed'> }) {\n  const totalTasks = useMemo(() => {\n    return summary.not_start + summary.errored + summary.progressing + summary.succeed;\n  }, [summary]);\n\n  const chartData = useMemo(() => {\n    return [\n      { status: 'not_start', tasks: summary.not_start, fill: 'hsl(var(--accent))' },\n      { status: 'progressing', tasks: summary.progressing, fill: 'hsl(var(--info))' },\n      { status: 'succeed', tasks: summary.succeed, fill: 'hsl(var(--success))' },\n      { status: 'errored', tasks: summary.errored, fill: 'hsl(var(--destructive))' },\n    ];\n  }, [summary]);\n\n  return (\n    <Card className=\"flex flex-col\">\n      <CardHeader className=\"items-center pb-0\">\n        <CardTitle className=\"text-lg font-normal\">Evaluation Items</CardTitle>\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0\">\n        <ChartContainer\n          config={pieChartConfig}\n          className=\"mx-auto aspect-square max-h-[250px]\"\n        >\n          <PieChart>\n            <ChartTooltip\n              cursor={false}\n              content={<ChartTooltipContent hideLabel />}\n            />\n            <Pie\n              data={chartData}\n              dataKey=\"tasks\"\n              nameKey=\"status\"\n              innerRadius={60}\n              strokeWidth={5}\n            >\n              <Label\n                content={({ viewBox }) => {\n                  if (viewBox && 'cx' in viewBox && 'cy' in viewBox) {\n                    return (\n                      <text\n                        x={viewBox.cx}\n                        y={viewBox.cy}\n                        textAnchor=\"middle\"\n       
                 dominantBaseline=\"middle\"\n                      >\n                        <tspan\n                          x={viewBox.cx}\n                          y={viewBox.cy}\n                          className=\"fill-foreground text-3xl font-bold\"\n                        >\n                          {totalTasks.toLocaleString()}\n                        </tspan>\n                        <tspan\n                          x={viewBox.cx}\n                          y={(viewBox.cy || 0) + 24}\n                          className=\"fill-muted-foreground\"\n                        >\n                          Items\n                        </tspan>\n                      </text>\n                    );\n                  }\n                }}\n              />\n            </Pie>\n          </PieChart>\n        </ChartContainer>\n      </CardContent>\n    </Card>\n  );\n}\n\nconst color_placeholder = '#71717a40';\n\nfunction StatusPieChartSkeleton () {\n  return (\n    <Card className=\"flex flex-col\">\n      <CardHeader className=\"items-center pb-0\">\n        <CardTitle className=\"text-lg font-normal\">Evaluation Items</CardTitle>\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0\">\n        <ChartContainer\n          config={pieChartConfig}\n          className=\"mx-auto aspect-square max-h-[250px]\"\n        >\n          <PieChart>\n            <Pie\n              animationDuration={0}\n              data={[{ count: 1, state: '', fill: color_placeholder }]}\n              dataKey=\"count\"\n              nameKey=\"state\"\n              innerRadius={60}\n              strokeWidth={5}\n            >\n              <Label\n                content={({ viewBox }) => {\n                  if (viewBox && 'cx' in viewBox && 'cy' in viewBox) {\n                    return (\n                      <text\n                        x={viewBox.cx}\n                        y={viewBox.cy}\n                        textAnchor=\"middle\"\n                  
      dominantBaseline=\"middle\"\n                      >\n                        <tspan\n                          x={viewBox.cx}\n                          y={viewBox.cy}\n                          className=\"fill-muted-foreground text-3xl font-bold\"\n                        >\n                          --\n                        </tspan>\n                        <tspan\n                          x={viewBox.cx}\n                          y={(viewBox.cy || 0) + 24}\n                          className=\"fill-muted-foreground\"\n                        >\n                          Items\n                        </tspan>\n                      </text>\n                    );\n                  }\n                }}\n              />\n            </Pie>\n          </PieChart>\n        </ChartContainer>\n      </CardContent>\n    </Card>\n  );\n}\n\nconst chartConfig = {\n  semantic_similarity: {\n    label: 'Semantic Similarity',\n    color: 'hsl(var(--chart-1))',\n  },\n  factual_correctness: {\n    label: 'Factual Correctness',\n    color: 'hsl(var(--chart-2))',\n  },\n} satisfies ChartConfig;\n\nexport function RagasMetricsChart ({ summary }: { summary: Pick<EvaluationTaskSummaryType, keyof EvaluationTaskSummaryType & (`${string}_correctness` | `${string}_similarity`)> }) {\n  const chartData = useMemo(() => {\n    return [\n      { metrics: 'min', semantic_similarity: summary.min_semantic_similarity, factual_correctness: summary.min_factual_correctness },\n      { metrics: 'max', semantic_similarity: summary.max_semantic_similarity, factual_correctness: summary.max_factual_correctness },\n      { metrics: 'avg', semantic_similarity: summary.avg_semantic_similarity, factual_correctness: summary.avg_factual_correctness },\n      { metrics: 'std', semantic_similarity: summary.std_semantic_similarity, factual_correctness: summary.std_factual_correctness },\n    ];\n  }, [summary]);\n\n  return (\n    <Card className=\"flex flex-col justify-between h-full\">\n    
  <CardHeader className=\"items-center pb-0\">\n        <CardTitle className=\"text-lg font-normal\">Ragas Metrics</CardTitle>\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0 flex items-center justify-center\">\n        <div className=\"w-max h-max flex-1\">\n          <ChartContainer className=\"max-h-[192px] mx-auto\" config={chartConfig}>\n            <BarChart accessibilityLayer data={chartData}>\n              <CartesianGrid vertical={false} />\n              <XAxis\n                dataKey=\"metrics\"\n                tickLine={false}\n                tickMargin={10}\n                axisLine={false}\n                tickFormatter={(value) => value.slice(0, 3)}\n              />\n              <ChartTooltip\n                cursor={false}\n                content={<ChartTooltipContent indicator=\"dashed\" />}\n              />\n              <Bar dataKey=\"semantic_similarity\" fill=\"hsl(var(--chart-1))\" radius={4} />\n              <Bar dataKey=\"factual_correctness\" fill=\"hsl(var(--chart-2))\" radius={4} />\n            </BarChart>\n          </ChartContainer>\n        </div>\n      </CardContent>\n    </Card>\n  );\n}\n\nexport function RagasMetricsChartSkeleton () {\n  return (\n    <Card className=\"flex flex-col justify-between h-full\">\n      <CardHeader className=\"items-center pb-0\">\n        <CardTitle className=\"text-lg font-normal\">Ragas Metrics</CardTitle>\n      </CardHeader>\n      <CardContent className=\"flex-1 pb-0 flex items-center justify-center\">\n        <div className=\"w-max h-max flex-1\">\n          <ChartContainer className=\"max-h-[192px] mx-auto\" config={chartConfig}>\n            <BarChart accessibilityLayer>\n              <CartesianGrid vertical={false} />\n              <XAxis\n                dataKey=\"metrics\"\n                tickLine={false}\n                tickMargin={10}\n                axisLine={false}\n                tickFormatter={(value) => value.slice(0, 3)}\n              />\n         
     <ChartTooltip\n                cursor={false}\n                content={<ChartTooltipContent indicator=\"dashed\" />}\n              />\n              <Bar dataKey=\"semantic_similarity\" fill=\"hsl(var(--chart-1))\" radius={4} />\n              <Bar dataKey=\"factual_correctness\" fill=\"hsl(var(--chart-2))\" radius={4} />\n            </BarChart>\n          </ChartContainer>\n        </div>\n      </CardContent>\n    </Card>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-task-items-table.tsx",
    "content": "'use client';\n\nimport { type EvaluationTaskItem, listEvaluationTaskItems } from '@/api/evaluations';\nimport { datetime } from '@/components/cells/datetime';\nimport { metadataCell } from '@/components/cells/metadata';\nimport { mono } from '@/components/cells/mono';\nimport { percent } from '@/components/cells/percent';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { documentCell, evaluationTaskStatusCell, textChunksArrayCell } from '@/components/evaluations/cells';\nimport { type KeywordFilter, KeywordFilterToolbar } from '@/components/evaluations/keyword-filter-toolbar';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { useState } from 'react';\n\nconst helper = createColumnHelper<EvaluationTaskItem>();\n\nconst columns = [\n  helper.accessor('id', { header: 'ID', cell: mono }),\n  helper.accessor('status', { header: 'STATUS', cell: evaluationTaskStatusCell, meta: { colSpan: context => context.row.original.status === 'error' ? 3 : 1 } }),\n  helper.accessor('semantic_similarity', {\n    header: 'SEMANTIC SIMILARITY',\n    cell: context => percent(context, {\n      colorStops: [\n        { checkpoint: 0, color: 'hsl(var(--destructive))' },\n        { checkpoint: 1 - 0.618, color: 'hsl(var(--destructive))' },\n        { checkpoint: 0.5, color: 'hsl(var(--warning))' },\n        { checkpoint: 0.618, color: 'hsl(var(--success))' },\n        { checkpoint: 1, color: 'hsl(var(--success))' },\n      ],\n    }),\n    meta: { colSpan: context => context.row.original.status === 'error' ? 
0 : 1 }\n  }),\n  helper.accessor('factual_correctness', {\n    header: 'FACTUAL CORRECTNESS',\n    cell: context => percent(context, {\n      colorStops: [\n        { checkpoint: 0, color: 'hsl(var(--destructive))' },\n        { checkpoint: 1 - 0.618, color: 'hsl(var(--destructive))' },\n        { checkpoint: 0.5, color: 'hsl(var(--warning))' },\n        { checkpoint: 0.618, color: 'hsl(var(--success))' },\n        { checkpoint: 1, color: 'hsl(var(--success))' },\n      ],\n    }),\n    meta: { colSpan: context => context.row.original.status === 'error' ? 0 : 1 }\n  }),\n  helper.accessor('query', { header: 'QUERY', cell: documentCell('Query') }),\n  helper.accessor('chat_engine', { header: 'CHAT ENGINE' }),\n  helper.accessor('reference', { header: 'REFERENCE', cell: documentCell('Reference') }),\n  helper.accessor('response', { header: 'RESPONSE', cell: documentCell('Response') }),\n  helper.accessor('retrieved_contexts', { header: 'RETRIEVED CONTEXTS', cell: textChunksArrayCell }),\n  helper.accessor('extra', { header: 'EXTRA', cell: metadataCell }),\n  helper.accessor('created_at', { header: 'CREATED AT', cell: datetime }),\n  helper.accessor('updated_at', { header: 'UPDATED AT', cell: datetime }),\n] as ColumnDef<EvaluationTaskItem>[];\n\nexport function EvaluationTaskItemsTable ({ evaluationTaskId }: { evaluationTaskId: number }) {\n  const [filter, setFilter] = useState<KeywordFilter>({});\n  return (\n    <DataTableRemote\n      columns={columns}\n      toolbar={() => (\n        <KeywordFilterToolbar onFilterChange={setFilter} />\n      )}\n      apiKey={`api.evaluation.tasks.${evaluationTaskId}.items.list`}\n      api={(page) => listEvaluationTaskItems(evaluationTaskId, { ...page, ...filter })}\n      apiDeps={[filter.keyword]}\n      idColumn=\"id\"\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/evaluation-tasks-table.tsx",
    "content": "'use client';\n\nimport { cancelEvaluationTask, type EvaluationTask, type EvaluationTaskWithSummary, listEvaluationTasks } from '@/api/evaluations';\nimport { actions } from '@/components/cells/actions';\nimport { datetime } from '@/components/cells/datetime';\nimport { link } from '@/components/cells/link';\nimport { mono } from '@/components/cells/mono';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { mutateEvaluationTasks } from '@/components/evaluations/hooks';\nimport { type KeywordFilter, KeywordFilterToolbar } from '@/components/evaluations/keyword-filter-toolbar';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { useState } from 'react';\n\nconst helper = createColumnHelper<EvaluationTaskWithSummary>();\n\nconst columns = [\n  helper.accessor('id', { header: 'ID', cell: mono }),\n  helper.accessor('name', { header: 'NAME', cell: link({ text: row => row.name, url: row => `/evaluation/tasks/${row.id}` }) }),\n  helper.accessor('dataset_id', { header: 'DATASET', cell: link({ text: row => String(row.dataset_id), url: row => `/evaluation/datasets/${row.dataset_id}` }) }),\n  helper.accessor('user_id', { header: 'USER ID' }),\n  helper.accessor('created_at', { header: 'CREATED AT', cell: datetime }),\n  helper.accessor('updated_at', { header: 'UPDATED AT', cell: datetime }),\n  helper.display({\n    id: 'op',\n    header: 'OPERATIONS',\n    cell: actions(row => [\n      {\n        title: 'View',\n        action: context => {\n          context.startTransition(() => {\n            context.router.push(`/evaluation/tasks/${row.id}`);\n          });\n        },\n      },\n      {\n        title: 'Cancel',\n        disabled: row.summary.not_start === 0,\n        action: async (context) => {\n          await cancelEvaluationTask(row.id);\n          void mutateEvaluationTasks();\n          context.setDropdownOpen(false);\n        },\n        
dangerous: {},\n      },\n    ]),\n  }),\n] as ColumnDef<EvaluationTask>[];\n\nexport function EvaluationTasksTable () {\n  const [filter, setFilter] = useState<KeywordFilter>({});\n  return (\n    <DataTableRemote\n      columns={columns}\n      toolbar={() => (\n        <KeywordFilterToolbar onFilterChange={setFilter} />\n      )}\n      apiKey=\"api.evaluation.tasks.list\"\n      api={page => listEvaluationTasks({ ...page, ...filter })}\n      apiDeps={[filter.keyword]}\n      idColumn=\"id\"\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/hooks.ts",
    "content": "import { type EvaluationDataset, type EvaluationTask, getEvaluationDatasetItem, listEvaluationDatasets, listEvaluationTasks } from '@/api/evaluations';\nimport { listAllHelper, ServerError } from '@/lib/request';\nimport useSWR, { mutate } from 'swr';\n\nexport function useAllEvaluationDatasets (flag = true) {\n  return useSWR(flag && 'api.evaluation.datasets.list-all', () => listAllHelper(listEvaluationDatasets, 'id'));\n}\n\nexport function useEvaluationDataset (id: number | null | undefined) {\n  const { data, mutate, ...rest } = useAllEvaluationDatasets(id != null);\n\n  let evaluationDataset: EvaluationDataset | undefined;\n  let error = rest.error;\n  if (data) {\n    evaluationDataset = data.find(evaluationDataset => evaluationDataset.id === id);\n    if (!evaluationDataset && !error) {\n      error = new ServerError(new Response(null, { status: 404 }), 'Not found');\n    }\n  }\n\n  return {\n    evaluationDataset: data?.find(evaluationDataset => evaluationDataset.id === id),\n    ...rest,\n    error,\n  };\n}\n\nexport function useEvaluationDatasetItem (datasetId: number, id: number) {\n  const { data, ...rest } = useSWR(`api.evaluation.datasets.${datasetId}.items.${id}`, () => getEvaluationDatasetItem(datasetId, id));\n\n  return {\n    evaluationDatasetItem: data,\n    ...rest,\n  };\n}\n\nexport function mutateEvaluationDatasets () {\n  return mutate(key => {\n    if (typeof key === 'string') {\n      return key.startsWith(`api.evaluation.datasets.`);\n    }\n    return false;\n  });\n}\n\nexport function mutateEvaluationDataset (id: number) {\n  return mutate(key => {\n    if (typeof key === 'string') {\n      return key.startsWith(`api.evaluation.datasets.${id}.`);\n    }\n    return false;\n  });\n}\n\nexport function useAllEvaluationTasks (flag = true) {\n  return useSWR(flag && 'api.evaluation.tasks.list-all', () => listAllHelper(listEvaluationTasks, 'id'));\n}\n\nexport function useEvaluationTask (id: number | null | undefined) {\n 
 const { data, mutate, ...rest } = useAllEvaluationTasks(id != null);\n  let evaluationTask: EvaluationTask | undefined;\n  let error = rest.error;\n  if (data) {\n    evaluationTask = data.find(evaluationTask => evaluationTask.id === id);\n    if (!evaluationTask && !error) {\n      error = new ServerError(new Response(null, { status: 404 }), 'Not found');\n    }\n  }\n\n  return {\n    evaluationTask,\n    ...rest,\n    error,\n  };\n}\n\nexport function mutateEvaluationTasks () {\n  return mutate(key => {\n    if (typeof key === 'string') {\n      return key.startsWith(`api.evaluation.tasks.`);\n    }\n    return false;\n  });\n}\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/keyword-filter-toolbar.tsx",
    "content": "import { onSubmitHelper } from '@/components/form/utils';\nimport { Button } from '@/components/ui/button';\nimport { Form, FormControl, formDomEventHandlers, FormField } from '@/components/ui/form.beta';\nimport { Input } from '@/components/ui/input';\nimport { useDataTable } from '@/components/use-data-table';\nimport { useForm } from '@tanstack/react-form';\nimport { z } from 'zod';\n\nexport function KeywordFilterToolbar ({ onFilterChange }: { onFilterChange: (filters: KeywordFilter) => void }) {\n  const { loading } = useDataTable();\n\n  const form = useForm({\n    validators: {\n      onSubmit: keywordFilter,\n    },\n    defaultValues: {\n      keyword: '',\n    },\n    onSubmit: onSubmitHelper(keywordFilter, async ({ keyword, ...rest }) => {\n      const trimmedKeyword = keyword?.trim();\n      onFilterChange({\n        keyword: trimmedKeyword ? trimmedKeyword : undefined,\n        ...rest,\n      });\n    }, () => {}),\n  });\n\n  return (\n    <Form form={form} disabled={loading}>\n      <form className=\"flex gap-2 items-center\" {...formDomEventHandlers(form)}>\n        <FormField\n          name=\"keyword\"\n          render={(field) => (\n            <FormControl>\n              <Input\n                className=\"flex-1\"\n                placeholder=\"Search Evaluation Datasets...\"\n                name={field.name}\n                onBlur={field.handleBlur}\n                onChange={ev => field.handleChange(ev.target.value)}\n                value={field.state.value ?? ''}\n              />\n            </FormControl>\n          )}\n        />\n        <Button variant=\"secondary\" disabled={loading} type=\"submit\">\n          Search\n        </Button>\n      </form>\n    </Form>\n  );\n}\n\nconst keywordFilter = z.object({\n  keyword: z.string().optional(),\n});\n\nexport type KeywordFilter = z.infer<typeof keywordFilter>;\n"
  },
  {
    "path": "frontend/app/src/components/evaluations/update-evaluation-dataset-item-form.tsx",
    "content": "import { updateEvaluationDatasetItem, type UpdateEvaluationDatasetItemParams } from '@/api/evaluations';\nimport { mutateEvaluationDataset, useEvaluationDatasetItem } from '@/components/evaluations/hooks';\nimport { FormTextarea } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { createAccessorHelper, GeneralSettingsForm } from '@/components/settings-form';\nimport { GeneralSettingsField } from '@/components/settings-form/GeneralSettingsField';\nimport { z } from 'zod';\n\nconst field = formFieldLayout<{ value: any }>();\n\nexport function UpdateEvaluationDatasetItemForm ({ evaluationDatasetId, evaluationDatasetItemId }: { evaluationDatasetId: number, evaluationDatasetItemId: number }) {\n  const {\n    evaluationDatasetItem,\n    isLoading,\n    isValidating,\n    mutate,\n  } = useEvaluationDatasetItem(evaluationDatasetId, evaluationDatasetItemId);\n\n  if (!evaluationDatasetItem) {\n    return <></>;\n  }\n\n  return (\n    <div className=\"space-y-4 max-w-screen-sm\">\n      <GeneralSettingsForm\n        readonly={false}\n        data={evaluationDatasetItem}\n        loading={!evaluationDatasetItem || isLoading || isValidating}\n        onUpdate={async ({ query, reference, retrieved_contexts, extra }) => {\n          const item = await updateEvaluationDatasetItem(\n            evaluationDatasetId,\n            evaluationDatasetItemId,\n            {\n              query, retrieved_contexts, reference, extra,\n            },\n          );\n          void mutate(item, { revalidate: true });\n          void mutateEvaluationDataset(evaluationDatasetId);\n        }}\n      >\n        <GeneralSettingsField accessor={query} schema={textSchema}>\n          <field.Basic name=\"value\" label=\"Query\">\n            <CodeInput language=\"markdown\" />\n          </field.Basic>\n        </GeneralSettingsField>\n        
<GeneralSettingsField accessor={reference} schema={textSchema}>\n          <field.Basic name=\"value\" label=\"Reference\">\n            <CodeInput language=\"markdown\" />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={retrievedContexts} schema={textListSchema}>\n          <field.PrimitiveArray name=\"value\" label=\"Retrieved Contexts\" newItemValue={() => ''}>\n            <FormTextarea />\n          </field.PrimitiveArray>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={extra} schema={jsonSchema}>\n          <field.Basic name=\"value\" label=\"Extra\">\n            <CodeInput language=\"json\" />\n          </field.Basic>\n        </GeneralSettingsField>\n      </GeneralSettingsForm>\n    </div>\n  );\n}\n\nconst helper = createAccessorHelper<UpdateEvaluationDatasetItemParams>();\n\nconst query = helper.field('query');\nconst reference = helper.field('reference');\nconst retrievedContexts = helper.field('retrieved_contexts');\nconst extra = helper.jsonTextField('extra');\n\nconst textSchema = z.string().min(1);\nconst textListSchema = z.string().min(1, 'Non empty').array();\nconst jsonSchema = z.any();\n"
  },
  {
    "path": "frontend/app/src/components/feedbacks/feedbacks-table.tsx",
    "content": "'use client';\n\nimport { type Feedback, FeedbackType, listFeedbacks } from '@/api/feedbacks';\nimport { datetime } from '@/components/cells/datetime';\nimport { mono } from '@/components/cells/mono';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { ThumbsDownIcon, ThumbsUpIcon } from 'lucide-react';\nimport Link from 'next/link';\n\nconst helper = createColumnHelper<Feedback>();\n\nconst columns = [\n  helper.accessor('id', { header: 'ID', cell: mono }),\n  helper.accessor('feedback_type', {\n    header: 'TYPE',\n    cell: (cell) => {\n      const type = cell.getValue();\n      switch (type) {\n        case FeedbackType.like:\n          return (<span className=\"flex gap-2 items-center text-success\"><ThumbsUpIcon className=\"size-4\" /> LIKE</span>);\n        case FeedbackType.dislike:\n          return (<span className=\"flex gap-2 items-center text-destructive\"><ThumbsDownIcon className=\"size-4\" /> DISLIKE</span>);\n      }\n    },\n  }),\n  helper.accessor('origin', { header: 'FEEDBACK ORIGIN', cell: mono }),\n  helper.accessor('chat_origin', { header: 'CHAT ORIGIN', cell: mono }),\n  helper.display({\n    id: 'chat',\n    header: 'QUESTION',\n    cell: ({ row }) =>\n      <Link className=\"underline\" href={`/c/${row.original.chat_id}#${row.original.chat_message_id}`}>\n        <b>{row.original.chat_title}</b> <span className=\"text-muted-foreground\">{row.original.chat_id}#{row.original.chat_message_id}</span>\n      </Link>,\n  }),\n  helper.accessor('chat_message_content', {\n    header: 'CONTENT',\n    cell: cell => <>{cell.getValue().slice(0, 50)}... 
<span className=\"text-muted-foreground\">({cell.getValue().length + ' characters'})</span></>,\n  }),\n  helper.accessor('comment', { header: 'COMMENT', cell: mono }),\n  helper.accessor('user_email', { header: 'USER', cell: mono }),\n  helper.accessor('created_at', { header: 'CREATED AT', cell: datetime }),\n] as ColumnDef<Feedback>[];\n\nexport function FeedbacksTable () {\n  return (\n    <DataTableRemote\n      columns={columns}\n      apiKey=\"api.feedbacks.list\"\n      api={listFeedbacks}\n      idColumn=\"id\"\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/biz.tsx",
    "content": "import type { ChatEngine } from '@/api/chat-engines';\nimport { type EmbeddingModel } from '@/api/embedding-models';\nimport type { EvaluationDataset } from '@/api/evaluations';\nimport { type KnowledgeBaseSummary } from '@/api/knowledge-base';\nimport { type LLM } from '@/api/llms';\nimport type { ProviderOption } from '@/api/providers';\nimport { type Reranker } from '@/api/rerankers';\nimport { useAllChatEngines } from '@/components/chat-engine/hooks';\nimport { DateFormat } from '@/components/date-format';\nimport { CreateEmbeddingModelForm } from '@/components/embedding-models/CreateEmbeddingModelForm';\nimport { useAllEmbeddingModels } from '@/components/embedding-models/hooks';\nimport { useAllEvaluationDatasets } from '@/components/evaluations/hooks';\nimport { FormCombobox, type FormComboboxConfig, type FormComboboxProps } from '@/components/form/control-widget';\nimport { useAllKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { CreateLLMForm } from '@/components/llm/CreateLLMForm';\nimport { useAllLlms } from '@/components/llm/hooks';\nimport { ManagedDialog } from '@/components/managed-dialog';\nimport { ManagedPanelContext } from '@/components/managed-panel';\nimport { CreateRerankerForm } from '@/components/reranker/CreateRerankerForm';\nimport { useAllRerankers } from '@/components/reranker/hooks';\nimport { Badge } from '@/components/ui/badge';\nimport { DialogContent, DialogDescription, DialogHeader, DialogTitle } from '@/components/ui/dialog';\nimport { AlertTriangleIcon, DotIcon, PlusIcon } from 'lucide-react';\nimport { forwardRef, type Ref } from 'react';\n\nexport const EmbeddingModelSelect = forwardRef<any, Omit<FormComboboxProps<EmbeddingModel, 'id'>, 'config'> & { reverse?: boolean }>(({ reverse = true, ...props }, ref) => {\n  const { data: embeddingModels, isLoading, mutate, error } = useAllEmbeddingModels();\n\n  return (\n    <FormCombobox\n      {...props}\n      ref={ref}\n      placeholder=\"Default 
Embedding Model\"\n      config={{\n        options: embeddingModels ?? [],\n        optionKeywords: option => [option.name, option.provider, option.model],\n        loading: isLoading,\n        error,\n        renderValue: option => (<span>{option.name} <span className=\"text-muted-foreground\">[{option.vector_dimension}]</span></span>),\n        renderOption: option => (\n          <div>\n            <div><strong>{option.name}</strong></div>\n            <div className=\"text-xs text-muted-foreground\">\n              <strong>{option.provider}</strong>:{option.model} [{option.vector_dimension}]\n            </div>\n          </div>\n        ),\n        renderCreateOption: (Wrapper, onCreated) => (\n          <ManagedDialog>\n            <ManagedPanelContext.Consumer>\n              {({ setOpen }) => (\n                <>\n                  <Wrapper onSelect={() => setOpen(true)}>\n                    <span className=\"flex gap-1 items-center text-muted-foreground\">\n                      <PlusIcon className=\"size-4\" />\n                      Create New Embedding Model\n                    </span>\n                  </Wrapper>\n                  <DialogContent>\n                    <DialogHeader>\n                      <DialogTitle>\n                        Create New Embedding Model\n                      </DialogTitle>\n                      <DialogDescription />\n                    </DialogHeader>\n                    <CreateEmbeddingModelForm\n                      onCreated={embeddingModel => {\n                        mutate();\n                        onCreated(embeddingModel);\n                        setOpen(false);\n                      }}\n                    />\n                  </DialogContent>\n                </>\n              )}\n            </ManagedPanelContext.Consumer>\n          </ManagedDialog>\n        ),\n        key: 'id',\n      } satisfies FormComboboxConfig<EmbeddingModel, 'id'>}\n    />\n  
);\n});\n\nEmbeddingModelSelect.displayName = 'EmbeddingModelSelect';\n\nexport const LLMSelect = forwardRef<any, Omit<FormComboboxProps<LLM, 'id'>, 'config'> & { reverse?: boolean }>(({ reverse = true, ...props }, ref) => {\n  const { data: llms, isLoading, mutate, error } = useAllLlms();\n\n  return (\n    <FormCombobox\n      {...props}\n      ref={ref}\n      placeholder=\"Default LLM\"\n      config={{\n        options: llms ?? [],\n        loading: isLoading,\n        error,\n        renderValue: option => (<span>{option.name}</span>),\n        renderOption: option => (\n          <div>\n            <div><strong>{option.name}</strong></div>\n            <div className=\"text-xs text-muted-foreground\">\n              <strong>{option.provider}</strong>:{option.model}\n            </div>\n          </div>\n        ),\n        renderCreateOption: (Wrapper, onCreated) => (\n          <ManagedDialog>\n            <ManagedPanelContext.Consumer>\n              {({ setOpen }) => (\n                <>\n                  <Wrapper onSelect={() => setOpen(true)}>\n                    <span className=\"flex gap-1 items-center text-muted-foreground\">\n                      <PlusIcon className=\"size-4\" />\n                      Create New LLM\n                    </span>\n                  </Wrapper>\n                  <DialogContent>\n                    <DialogHeader>\n                      <DialogTitle>\n                        Create New LLM\n                      </DialogTitle>\n                      <DialogDescription />\n                    </DialogHeader>\n                    <CreateLLMForm\n                      onCreated={llm => {\n                        mutate();\n                        onCreated(llm);\n                        setOpen(false);\n                      }}\n                    />\n                  </DialogContent>\n                </>\n              )}\n            </ManagedPanelContext.Consumer>\n          </ManagedDialog>\n        ),\n        
optionKeywords: option => [option.name, option.provider, option.model],\n        key: 'id',\n      } satisfies FormComboboxConfig<LLM, 'id'>}\n    />\n  );\n});\n\nLLMSelect.displayName = 'LLMSelect';\n\nexport const RerankerSelect = forwardRef<any, Omit<FormComboboxProps<Reranker, 'id'>, 'config'> & { reverse?: boolean }>(({ reverse = true, ...props }, ref) => {\n  const { data: rerankers, mutate, isLoading, error } = useAllRerankers();\n\n  return (\n    <FormCombobox\n      {...props}\n      ref={ref}\n      placeholder=\"Default Reranker Model\"\n      config={{\n        options: rerankers ?? [],\n        optionKeywords: option => [option.name, option.provider, option.model],\n        loading: isLoading,\n        error,\n        renderValue: option => (<span>{option.name}</span>),\n        renderOption: option => (\n          <div>\n            <div><strong>{option.name}</strong></div>\n            <div className=\"text-xs text-muted-foreground\">\n              <strong>{option.provider}</strong>:{option.model}\n            </div>\n          </div>\n        ),\n        renderCreateOption: (Wrapper, onCreated) => (\n          <ManagedDialog>\n            <ManagedPanelContext.Consumer>\n              {({ setOpen }) => (\n                <>\n                  <Wrapper onSelect={() => setOpen(true)}>\n                    <span className=\"flex gap-1 items-center text-muted-foreground\">\n                      <PlusIcon className=\"size-4\" />\n                      Create New Reranker\n                    </span>\n                  </Wrapper>\n                  <DialogContent>\n                    <DialogHeader>\n                      <DialogTitle>\n                        Create New Reranker\n                      </DialogTitle>\n                      <DialogDescription />\n                    </DialogHeader>\n                    <CreateRerankerForm\n                      onCreated={reranker => {\n                        mutate();\n                        
onCreated(reranker);\n                        setOpen(false);\n                      }}\n                    />\n                  </DialogContent>\n                </>\n              )}\n            </ManagedPanelContext.Consumer>\n          </ManagedDialog>\n        ),\n        key: 'id',\n      } satisfies FormComboboxConfig<Reranker, 'id'>}\n    />\n  );\n});\n\nRerankerSelect.displayName = 'RerankerSelect';\n\nexport function ProviderSelect<Provider extends ProviderOption> ({\n  options, isLoading, error, ref, ...props\n}: {\n  options: ProviderOption[] | undefined;\n  isLoading: boolean;\n  error: unknown;\n  ref?: Ref<any>\n}) {\n  return (\n    <FormCombobox\n      ref={ref}\n      config={{\n        options: options ?? [],\n        optionKeywords: option => [option.provider, option.provider_description ?? '', option.provider_display_name ?? ''],\n        loading: isLoading,\n        error,\n        renderOption: option => (\n          <div>\n            <div className=\"text-sm font-bold\">{option.provider_display_name ?? option.provider}</div>\n            {option.provider_description && <div className=\"text-xs text-muted-foreground break-words\" style={{ maxWidth: 'calc(var(--radix-select-trigger-width) - 68px)' }}>{option.provider_description}</div>}\n          </div>\n        ),\n        itemClassName: 'space-y-1',\n        renderValue: option => option.provider_display_name ?? 
option.provider,\n        key: 'provider',\n      } satisfies FormComboboxConfig<ProviderOption, 'provider'>}\n      contentWidth=\"anchor\"\n      {...props}\n    />\n  );\n}\n\nProviderSelect.displayName = 'ProviderSelect';\n\nexport const KBSelect = forwardRef<any, Omit<FormComboboxProps<KnowledgeBaseSummary, 'id'>, 'config'> & { reverse?: boolean }>(({ reverse = true, ...props }, ref) => {\n  const { data: kbs, isLoading, error } = useAllKnowledgeBases();\n\n  return (\n    <FormCombobox\n      ref={ref}\n      {...props}\n      placeholder=\"Select Knowledge Base\"\n      config={{\n        options: kbs ?? [],\n        optionKeywords: option => [String(option.id), option.name, option.description ?? ''],\n        loading: isLoading,\n        error,\n        renderValue: option => (\n          <div className=\"\">\n            <span>{option.name}</span>\n            <div className=\"text-xs text-muted-foreground ml-2 inline-flex gap-1 items-center\">\n              <span>\n                {(option.documents_total ?? 0) || <><AlertTriangleIcon className=\"text-warning inline-flex size-3 mr-0.5\" /> no</>} documents\n              </span>\n              <DotIcon className=\"size-4\" />\n              <span className=\"text-xs text-muted-foreground\">\n                {(option.data_sources_total ?? 0) || <><AlertTriangleIcon className=\"inline-flex size-3 mr-0.5\" /> no</>} data sources\n              </span>\n            </div>\n          </div>\n        ),\n        renderOption: option => (\n          <div className=\"space-y-1\">\n            <div>\n              <strong>\n                {option.name}\n              </strong>\n            </div>\n            <div className=\"text-xs text-muted-foreground flex gap-1 items-center\">\n              <span>\n                {(option.documents_total ?? 
0) || <><AlertTriangleIcon className=\"text-warning inline-flex size-3 mr-0.5\" /> no</>} documents\n              </span>\n              <DotIcon className=\"size-4\" />\n              <span>\n                {(option.data_sources_total ?? 0) || <><AlertTriangleIcon className=\"inline-flex size-3 mr-0.5\" /> no</>} data sources\n              </span>\n            </div>\n            <div className=\"text-xs text-muted-foreground\">\n              {option.description}\n            </div>\n          </div>\n        ),\n        key: 'id',\n      } satisfies FormComboboxConfig<KnowledgeBaseSummary, 'id'>}\n    />\n  );\n});\n\nKBSelect.displayName = 'KBSelect';\n\nexport function EvaluationDatasetSelect ({ reverse = true, ref, ...props }: Omit<FormComboboxProps<EvaluationDataset, 'id'>, 'config'> & { reverse?: boolean, ref?: Ref<any> }) {\n\n  const { data: evaluationDatasets, isLoading, error } = useAllEvaluationDatasets();\n\n  return (\n    <FormCombobox\n      {...props}\n      ref={ref}\n      placeholder=\"Select Evaluation Dataset\"\n      config={{\n        options: evaluationDatasets ?? 
[],\n        optionKeywords: option => [option.name],\n        loading: isLoading,\n        error,\n        renderValue: option => (<span>{option.name}</span>),\n        renderOption: option => (\n          <div>\n            <div><strong>{option.name}</strong></div>\n            <div className=\"text-xs text-muted-foreground\">\n              Updated At: <DateFormat date={option.updated_at} />\n            </div>\n          </div>\n        ),\n        key: 'id',\n      } satisfies FormComboboxConfig<EvaluationDataset, 'id'>}\n    />\n  );\n}\n\nexport function ChatEngineSelect ({ reverse = true, ref, ...props }: Omit<FormComboboxProps<ChatEngine, 'name'>, 'config'> & { reverse?: boolean, ref?: Ref<any> }) {\n  const { data: chatEngines, isLoading, error } = useAllChatEngines();\n\n  return (\n    <FormCombobox\n      {...props}\n      ref={ref}\n      placeholder=\"Default Chat Engine\"\n      config={{\n        options: chatEngines ?? [],\n        optionKeywords: option => [option.name],\n        loading: isLoading,\n        error,\n        renderValue: option => (\n          <>\n            <strong>{option.name}</strong>\n            {!!option.engine_options.external_engine_config?.stream_chat_api_url && <Badge className=\"ml-2 font-normal\" variant=\"secondary\">External Chat Engine</Badge>}\n            {!!option.engine_options.knowledge_graph?.enabled && <Badge className=\"ml-2 font-normal\" variant=\"secondary\">Knowledge Graph</Badge>}\n          </>\n        ),\n        renderOption: option => (\n          <div>\n            <strong>{option.name}</strong>\n            {option.is_default && <Badge className=\"ml-2\">Default</Badge>}\n            {!!option.engine_options.external_engine_config?.stream_chat_api_url && <Badge className=\"ml-2 font-normal\" variant=\"secondary\">External Chat Engine</Badge>}\n            {!!option.engine_options.knowledge_graph?.enabled && <Badge className=\"ml-2 font-normal\" variant=\"secondary\">Knowledge Graph</Badge>}\n    
      </div>\n        ),\n        key: 'name',\n      } satisfies FormComboboxConfig<ChatEngine, 'name'>}\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/control-widget.tsx",
    "content": "import { Checkbox } from '@/components/ui/checkbox';\nimport { Command, CommandEmpty, CommandGroup, CommandInput, CommandItem, CommandList } from '@/components/ui/command';\nimport { Popover, PopoverContent } from '@/components/ui/popover';\nimport { Switch } from '@/components/ui/switch';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { cn } from '@/lib/utils';\nimport * as PopoverPrimitive from '@radix-ui/react-popover';\nimport type { SwitchProps } from '@radix-ui/react-switch';\nimport type { DeepKeys } from '@tanstack/react-form';\nimport { CheckIcon, ChevronDown, Loader2Icon, TriangleAlertIcon, XCircleIcon } from 'lucide-react';\nimport * as React from 'react';\nimport { type ChangeEvent, type ComponentProps, type FC, forwardRef, type Key, type ReactElement, type ReactNode, type Ref, useState } from 'react';\n\nexport interface FormControlWidgetProps<T, Optional extends boolean = false> {\n  id?: string;\n  'aria-describedby'?: string;\n  'aria-invalid'?: boolean;\n\n  onChange?: (value: (Optional extends false ? 
T : T | undefined) | ChangeEvent<any>) => void;\n  onBlur?: () => void;\n  value?: T;\n  disabled?: boolean;\n  name?: any; // type issue\n  ref?: Ref<any>;\n}\n\nexport { Input as FormInput, type InputProps as FormInputProps } from '@/components/ui/input';\n\nexport { Textarea as FormTextarea, type TextareaProps as FormTextareaProps } from '@/components/ui/textarea';\n\nexport interface FormSwitchProps extends FormControlWidgetProps<boolean>, Omit<SwitchProps, 'checked' | 'onCheckedChange' | keyof FormControlWidgetProps<boolean>> {\n}\n\nexport const FormSwitch = forwardRef<any, FormSwitchProps>(({ value, onChange, ...props }, forwardedRef) => {\n  return (\n    <Switch\n      {...props}\n      ref={forwardedRef}\n      checked={value}\n      onCheckedChange={onChange}\n    />\n  );\n});\n\nFormSwitch.displayName = 'FormSwitch';\n\nexport interface FormCheckboxProps extends FormControlWidgetProps<boolean>, Omit<ComponentProps<typeof Checkbox>, 'checked' | 'onCheckedChange' | keyof FormControlWidgetProps<boolean>> {\n}\n\nexport const FormCheckbox = forwardRef<any, FormCheckboxProps>(({ value, onChange, ...props }, forwardedRef) => {\n  return (\n    <Checkbox\n      {...props}\n      ref={forwardedRef}\n      checked={value}\n      onCheckedChange={value => onChange?.(!!value)}\n    />\n  );\n});\n\nFormCheckbox.displayName = 'FormCheckbox';\n\nexport interface FormSelectConfig<T extends object, K extends KeyOfType<T, Key>> {\n  loading?: boolean;\n  error?: unknown;\n  options: T[];\n  key: K;\n  clearable?: boolean;\n  itemClassName?: string;\n  renderOption: (option: T) => ReactNode;\n  renderValue?: (option: T) => ReactNode;\n}\n\nexport interface FormComboboxConfig<T extends object, K extends KeyOfType<T, Key>> extends FormSelectConfig<T, K> {\n  optionKeywords: (option: T) => string[];\n  renderCreateOption?: (wrapper: FC<{ onSelect: () => void, children: ReactNode }>, onCreated: (item: T) => void) => ReactNode;\n}\n\nexport interface FormComboboxProps<T 
extends object, K extends KeyOfType<T, Key>> extends FormControlWidgetProps<T[K], true> {\n  children?: ReactElement<any>;\n  placeholder?: string;\n  config: FormComboboxConfig<T, K>;\n  contentWidth?: 'anchor';\n  ref?: Ref<any>;\n}\n\nexport function FormCombobox<T extends object, K extends KeyOfType<T, Key>> ({ ref, config, placeholder, value, onChange, name, disabled, children, contentWidth = 'anchor', ...props }: FormComboboxProps<T, K>) {\n  const [open, setOpen] = useState(false);\n  const isConfigReady = !config.loading && !config.error;\n  const current = config.options.find(option => option[config.key] === value);\n\n  return (\n    <Popover open={open} onOpenChange={setOpen}>\n      <div className={cn('flex items-center gap-2', (props as any).className)}>\n        <PopoverPrimitive.Trigger\n          ref={ref}\n          disabled={disabled || !isConfigReady}\n          {...props}\n          className={cn(\n            'flex h-10 w-full items-center justify-between rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 [&>span]:line-clamp-1',\n          )}\n          asChild={!!children}\n        >\n          {config.loading\n            ? <span>Loading options...</span>\n            : !!config.error\n              ? <span className=\"text-destructive\">{getErrorMessage(config.error)}</span>\n              : (children ? children : current ? (config.renderValue ?? config.renderOption)(current) : <span className=\"text-muted-foreground\">{placeholder}</span>)\n          }\n          <span className=\"flex-1\" />\n          {config.loading\n            ? <Loader2Icon className=\"size-4 opacity-50 animate-spin repeat-infinite\" />\n            : config.error\n              ? 
<TriangleAlertIcon className=\"size-4 text-destructive opacity-50\" />\n              : config.clearable !== false && current != null && !disabled\n                ? <FormComboboxClearButton onClick={() => onChange?.(undefined)} />\n                : <ChevronDown className=\"h-4 w-4 opacity-50\" />}\n        </PopoverPrimitive.Trigger>\n      </div>\n      <PopoverContent className={cn('p-0 focus:outline-none', contentWidth === 'anchor' && 'w-[--radix-popover-trigger-width]')} align=\"start\" collisionPadding={8}>\n        <Command>\n          <CommandInput />\n          <CommandList>\n            <CommandGroup>\n              {config.renderCreateOption && config.renderCreateOption(\n                ({ onSelect, children }) => (\n                  <CommandItem value=\"$create$\" onSelect={onSelect} className={config.itemClassName} forceMount>\n                    {children}\n                  </CommandItem>\n                ),\n                (item) => {\n                  onChange?.(item[config.key]);\n                  setOpen(false);\n                })}\n              {config.options.map(option => (\n                <CommandItem\n                  key={option[config.key] as Key}\n                  value={String(option[config.key])}\n                  keywords={config.optionKeywords(option).flatMap(item => item.split(/\\s+/))}\n                  className={cn('group', config.itemClassName)}\n                  onSelect={value => {\n                    const item = config.options.find(option => String(option[config.key]) === value);\n                    if (item) {\n                      onChange?.(item[config.key]);\n                      setOpen(false);\n                    }\n                  }}\n                >\n                  {config.renderOption(option)}\n                  <CheckIcon className={cn('ml-auto size-4 opacity-0', current?.[config.key] === option[config.key] && 'opacity-100')} />\n                </CommandItem>\n              ))}\n          
  </CommandGroup>\n            <CommandEmpty className=\"text-muted-foreground/50 text-xs p-4 text-center\">\n              Empty List\n            </CommandEmpty>\n          </CommandList>\n        </Command>\n      </PopoverContent>\n    </Popover>\n  );\n}\n\nFormCombobox.displayName = 'FormCombobox';\n\nexport function FormComboboxClearButton ({ onClick }: { onClick?: () => void }) {\n  return (\n    <TooltipProvider delayDuration={0}>\n      <Tooltip>\n        <TooltipTrigger asChild>\n          <span role=\"button\" className=\"ml-2 opacity-50 hover:opacity-100\" onClick={onClick}>\n            <XCircleIcon className=\"size-4\" />\n          </span>\n        </TooltipTrigger>\n        <TooltipContent>\n          Clear select\n        </TooltipContent>\n      </Tooltip>\n    </TooltipProvider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/create-entity-form.tsx",
    "content": "import { formFieldLayout, type TypedFormFieldLayouts } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { Form, formDomEventHandlers, FormSubmit } from '@/components/ui/form.beta';\nimport { useForm } from '@tanstack/react-form';\nimport { type FunctionComponent, type ReactNode, useId, useState } from 'react';\nimport { z } from 'zod';\n\nexport interface CreateEntityFormBetaProps<R, I> {\n  defaultValues?: I;\n  onCreated?: (data: R) => void;\n  onInvalid?: () => void;\n  transitioning?: boolean;\n  children?: ReactNode;\n}\n\ninterface CreateEntityFormComponent<R, I> extends FunctionComponent<CreateEntityFormBetaProps<R, I>>, TypedFormFieldLayouts<I> {\n}\n\nexport function withCreateEntityForm<T, R, I = any> (\n  schema: z.ZodType<T, any, I>,\n  createApi: (data: T) => Promise<R>,\n  { submitTitle = 'Create', submittingTitle }: {\n    submitTitle?: ReactNode\n    submittingTitle?: ReactNode\n  } = {},\n): CreateEntityFormComponent<R, I> {\n\n  function CreateEntityFormBeta (\n    {\n      defaultValues,\n      onCreated,\n      onInvalid,\n      transitioning,\n      children,\n    }: CreateEntityFormBetaProps<R, I>,\n  ) {\n    const id = useId();\n    const [submissionError, setSubmissionError] = useState<unknown>();\n\n    const form = useForm<I>({\n      validators: {\n        onSubmit: schema,\n      },\n      defaultValues,\n      onSubmit: async ({ value, formApi }) => {\n        try {\n          const data = await createApi(schema.parse(value));\n          onCreated?.(data);\n        } catch (e) {\n          setSubmissionError(e);\n        }\n      },\n      onSubmitInvalid: () => {\n        onInvalid?.();\n      },\n    });\n\n    return (\n      <Form form={form} disabled={transitioning} submissionError={submissionError}>\n        <form\n          id={id}\n          className=\"max-w-screen-sm space-y-4\"\n          {...formDomEventHandlers(form, transitioning)}\n        >\n   
       {children}\n          <FormRootError />\n          <FormSubmit form={id} transitioning={transitioning} submittingChildren={submittingTitle}>\n            {submitTitle}\n          </FormSubmit>\n        </form>\n      </Form>\n    );\n  }\n\n  Object.assign(CreateEntityFormBeta, formFieldLayout<I>());\n\n  return CreateEntityFormBeta as CreateEntityFormComponent<R, I>;\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/field-layout.tsx",
    "content": "import type { FormControlWidgetProps } from '@/components/form/control-widget';\nimport type { CreateEntityFormBetaProps } from '@/components/form/create-entity-form';\nimport { Button } from '@/components/ui/button';\nimport { FormControl, FormDescription, FormField, FormItem, FormLabel, FormMessage, useFormContext } from '@/components/ui/form.beta';\nimport { isChangeEvent } from '@/lib/react';\nimport { cn } from '@/lib/utils';\nimport { type DeepKeys, type DeepValue, type FieldApi, FieldValidators, type FormApi, useField } from '@tanstack/react-form';\nimport { MinusIcon, PlusIcon } from 'lucide-react';\nimport { cloneElement, type ComponentProps, type ComponentType, type ReactElement, type ReactNode } from 'react';\nimport { z } from 'zod';\n\n/**\n * This function creates typed form layout components.\n *\n * - If T is ZodType, TFormData is the input\n * - If T is {@link CreateEntityFormBetaProps} or return type of {@link import('@/components/form/create-entity-form').withCreateEntityForm}, TFormData is the form input type\n * - If T is Record<string, any>, TFormData is itself\n */\nexport function formFieldLayout<T> (): TypedFormFieldLayouts<\n  T extends z.ZodType<any, any, any>\n    ? z.input<T>\n    : T extends CreateEntityFormBetaProps<any, infer I>\n      ? I\n      : T extends ComponentType<CreateEntityFormBetaProps<any, infer I>>\n        ? I\n        : T extends Record<string, any>\n          ? 
T\n          : never\n> {\n  return {\n    Basic: FormFieldBasicLayout,\n    Contained: FormFieldContainedLayout,\n    Inline: FormFieldInlineLayout,\n    PrimitiveArray: FormPrimitiveArrayFieldBasicLayout,\n  } satisfies TypedFormFieldLayouts<unknown> as never;\n}\n\nexport interface TypedFormFieldLayouts<TFormData> {\n  Basic: <TName extends DeepKeys<TFormData>> (props: ComponentProps<typeof FormFieldBasicLayout<TFormData, TName>>) => ReactNode,\n  Contained: <TName extends DeepKeys<TFormData>> (props: ComponentProps<typeof FormFieldContainedLayout<TFormData, TName>>) => ReactNode,\n  Inline: <TName extends DeepKeys<TFormData>> (props: ComponentProps<typeof FormFieldInlineLayout<TFormData, TName>>) => ReactNode,\n  PrimitiveArray: <TName extends DeepKeysOfType<TFormData, any[]>> (props: ComponentProps<typeof FormPrimitiveArrayFieldBasicLayout<TFormData, TName>>) => ReactNode,\n}\n\ntype WidgetProps<TFormData, TName extends DeepKeys<TFormData>> = Required<Omit<FormControlWidgetProps<DeepValue<TFormData, TName>>, 'id' | 'aria-invalid' | 'aria-describedby'>>\n\nexport interface FormFieldLayoutProps<\n  TFormData,\n  TName extends DeepKeys<TFormData> = DeepKeys<TFormData>\n> {\n  name: TName;\n  label: ReactNode;\n  required?: boolean;\n  description?: ReactNode;\n  /**\n   * Fallback value is used for display. 
This value will not be submitted to the server.\n   */\n  fallbackValue?: DeepValue<TFormData, TName>;\n  defaultValue?: NoInfer<DeepValue<TFormData, TName>>;\n  validators?: FieldValidators<TFormData, TName>;\n\n  children: ((props: WidgetProps<TFormData, TName>) => ReactNode) | ReactElement<WidgetProps<TFormData, TName>>;\n}\n\nfunction renderWidget<\n  TFormData,\n  TName extends DeepKeys<TFormData> = DeepKeys<TFormData>\n> (\n  children: FormFieldLayoutProps<TFormData, TName>['children'],\n  field: FieldApi<TFormData, TName>,\n  form: FormApi<TFormData>,\n  disabled: boolean | undefined,\n  fallbackValue?: DeepValue<TFormData, TName>,\n) {\n\n  const data: WidgetProps<TFormData, TName> = {\n    value: field.state.value ?? fallbackValue as any,\n    name: field.name,\n    onChange: ((ev: any) => {\n      if (isChangeEvent(ev)) {\n        const el = ev.currentTarget;\n        if (el instanceof HTMLInputElement) {\n          if (el.type === 'number') {\n            field.handleChange(el.valueAsNumber as any);\n            return;\n          } else if (el.type === 'date' || el.type === 'datetime-local') {\n            field.handleChange(el.valueAsDate as any);\n            return;\n          }\n        }\n        field.handleChange((el as HTMLInputElement).value as any);\n      } else {\n        field.handleChange(ev);\n      }\n    }),\n    onBlur: field.handleBlur,\n    disabled: disabled || field.form.state.isSubmitting,\n    ref: () => {},\n  };\n\n  if (typeof children === 'function') {\n    return children(data);\n  } else {\n    return cloneElement(children, data);\n  }\n}\n\nexport function FormFieldBasicLayout<\n  TFormData,\n  TName extends DeepKeys<TFormData> = DeepKeys<TFormData>\n> ({\n  name,\n  label,\n  description,\n  required,\n  fallbackValue,\n  defaultValue,\n  validators,\n  children,\n}: FormFieldLayoutProps<TFormData, TName>) {\n  return (\n    <FormField<TFormData, TName>\n      name={name}\n      defaultValue={defaultValue}\n      render={(field, 
form, disabled) => (\n        <FormItem>\n          <FormLabel>\n            {label}\n            {required && <sup className=\"text-destructive\" aria-hidden>*</sup>}\n          </FormLabel>\n          <FormControl>\n            {renderWidget<TFormData, TName>(children, field, form, disabled, fallbackValue)}\n          </FormControl>\n          {description && <FormDescription className=\"break-words\">{description}</FormDescription>}\n          <FormMessage />\n        </FormItem>\n      )}\n      validators={validators}\n    />\n  );\n}\n\nexport function FormFieldInlineLayout<\n  TFormData,\n  TName extends DeepKeys<TFormData> = DeepKeys<TFormData>\n> ({\n  name,\n  label,\n  description,\n  defaultValue,\n  validators,\n  children,\n}: FormFieldLayoutProps<TFormData, TName>) {\n  return (\n    <FormField<TFormData, TName>\n      name={name}\n      defaultValue={defaultValue}\n      render={(field, form, disabled) => (\n        <FormItem>\n          <div className=\"flex items-center gap-2\">\n            <FormControl>\n              {renderWidget<TFormData, TName>(children, field, form, disabled)}\n            </FormControl>\n            <FormLabel>{label}</FormLabel>\n          </div>\n          {description && <FormDescription>{description}</FormDescription>}\n          <FormMessage />\n        </FormItem>\n      )}\n      validators={validators}\n    />\n  );\n}\n\nexport function FormFieldContainedLayout<\n  TFormData,\n  TName extends DeepKeys<TFormData> = DeepKeys<TFormData>\n> ({\n  name,\n  label,\n  description,\n  required,\n  fallbackValue,\n  defaultValue,\n  validators,\n  children,\n  unimportant = false,\n}: FormFieldLayoutProps<TFormData, TName> & { unimportant?: boolean }) {\n  return (\n    <FormField<TFormData, TName>\n      name={name}\n      defaultValue={defaultValue}\n      validators={validators}\n      render={(field, form, disabled) => (\n        <FormItem className=\"flex flex-row items-center justify-between rounded-lg border 
p-4\">\n          <div className=\"space-y-0.5\">\n            <FormLabel className={cn(!unimportant && 'text-base')}>\n              {label}\n              {required && <sup className=\"text-destructive\" aria-hidden>*</sup>}\n            </FormLabel>\n            {description && <FormDescription>\n              {description}\n            </FormDescription>}\n          </div>\n          <FormControl>\n            {renderWidget<TFormData, TName>(children, field, form, disabled, fallbackValue)}\n          </FormControl>\n        </FormItem>\n      )}\n    />\n  );\n}\n\nexport type DeepKeysOfType<T, Value> = string & keyof { [P in DeepKeys<T> as DeepValue<T, P> extends Value ? P : never]: any }\n\nexport function FormPrimitiveArrayFieldBasicLayout<\n  TFormData,\n  TName extends DeepKeysOfType<TFormData, any[]> = DeepKeysOfType<TFormData, any[]>\n> ({\n  name,\n  label,\n  description,\n  children,\n  required,\n  defaultValue,\n  validators,\n  newItemValue,\n}: FormFieldLayoutProps<TFormData, TName> & { newItemValue: () => any }) {\n  const { form } = useFormContext<TFormData>();\n  const arrayField = useField<TFormData, TName>({\n    name,\n    form,\n    mode: 'array',\n  });\n\n  const arrayFieldValue: any[] = arrayField.state.value as never;\n\n  return (\n    <FormField\n      name={name}\n      defaultValue={defaultValue}\n      validators={validators}\n      render={() => (\n        <FormItem>\n          <FormLabel>\n            {label}\n            {required && <sup className=\"text-destructive\" aria-hidden>*</sup>}\n          </FormLabel>\n          <ol className=\"space-y-2\">\n            {arrayFieldValue.map((_, index) => (\n              <FormField\n                key={index}\n                name={`${name}[${index}]`}\n                render={(field, form, disabled) => (\n                  <li>\n                    <FormItem>\n                      <div className=\"flex gap-2\">\n                        <FormControl className=\"flex-1\">\n          
                {renderWidget<any, any>(children, field as any, form as any, disabled)}\n                        </FormControl>\n                        <Button\n                          disabled={disabled}\n                          size=\"icon\"\n                          variant=\"secondary\"\n                          type=\"button\"\n                          onClick={() => {\n                            void arrayField.insertValue(index, newItemValue());\n                          }}\n                        >\n                          <PlusIcon className=\"size-4\" />\n                        </Button>\n                        <Button\n                          disabled={disabled}\n                          size=\"icon\"\n                          variant=\"ghost\"\n                          type=\"button\"\n                          onClick={() => {\n                            void arrayField.removeValue(index);\n                          }}\n                        >\n                          <MinusIcon className=\"size-4\" />\n                        </Button>\n                      </div>\n                      <FormMessage />\n                    </FormItem>\n                  </li>\n                )}\n              />\n            ))}\n          </ol>\n          <Button\n            className=\"w-full\"\n            variant=\"outline\"\n            type=\"button\"\n            onClick={() => {\n              void arrayField.pushValue(newItemValue());\n            }}\n          >\n            <PlusIcon className=\"w-4 mr-1\" />\n            New Item\n          </Button>\n          {description && <FormDescription className=\"break-words\">{description}</FormDescription>}\n          <FormMessage />\n        </FormItem>\n      )}\n    />\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/root-error.tsx",
    "content": "import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { useFormContext as useTanstackFormContext } from '@/components/ui/form.beta';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { FormState } from '@tanstack/react-form';\n\nexport function FormRootError ({ title = 'Operation failed' }: { title?: string }) {\n  const { form, submissionError } = useTanstackFormContext();\n\n  return (\n    <form.Subscribe selector={state => getFormError(state, submissionError)}>\n      {(firstError) => !!firstError && (\n        <Alert variant=\"destructive\">\n          <AlertTitle>{title}</AlertTitle>\n          <AlertDescription>{firstError}</AlertDescription>\n        </Alert>\n      )}\n    </form.Subscribe>\n  );\n}\n\nfunction getFormError (state: FormState<any>, error: unknown) {\n  if (error) {\n    return getErrorMessage(error);\n  }\n  const submitError = state.errorMap.onSubmit;\n  if (!submitError) {\n    return undefined;\n  }\n  if (typeof submitError === 'object') {\n    return submitError.form;\n  }\n  return undefined;\n}\n"
  },
  {
    "path": "frontend/app/src/components/form/utils.ts",
    "content": "import type { FieldInfo, FormApi, ValidationErrorMap } from '@tanstack/react-form';\nimport { z, ZodError } from 'zod';\n\nexport function onSubmitHelper<T> (\n  schema: z.ZodType<T>,\n  action: (value: T, form: FormApi<T>) => Promise<void>,\n  setSubmissionError: (error: unknown) => void,\n): (props: { value: T, formApi: FormApi<T> }) => Promise<void> {\n  return async ({ value, formApi }) => {\n    try {\n      setSubmissionError(undefined);\n      await action(schema.parse(value), formApi);\n    } catch (e) {\n      if (e != null && e instanceof ZodError) {\n        const { formErrors, fieldErrors } = e.flatten();\n        const rest = applyFormError(formApi, Object.assign(\n          {} as Record<string, string[]>,\n          fieldErrors,\n          formErrors.length > 0 ? {\n            '.': formErrors,\n          } : {},\n        ), 'onSubmit');\n        if (rest) {\n          setSubmissionError(Object.values(rest).join(' '));\n        }\n      } else {\n        setSubmissionError(e);\n      }\n    }\n  };\n}\n\n/**\n * Applies error messages to the appropriate fields in the given form API based on the provided error body.\n * Matches each error message to its corresponding field within the form using the specified validation phase.\n * Returns any unhandled errors that do not correspond to fields in the form.\n *\n * @see https://github.com/pingcap-inc/labs.tidb.io/blob/4cf4a288439cb941dc2283ad1e8aafd479c510bd/frontend/src/lib/form.ts\n * @param formApi - The form API instance that contains field information and methods to apply errors.\n * @param body - The error body containing error messages keyed by field names.\n * @param phase - The validation phase under which the errors should be categorized.\n * @return Returns an object containing unhandled errors if any, or undefined if all errors are handled.\n */\nfunction applyFormError<FormApi extends { fieldInfo: Record<string, FieldInfo<any>> }> (\n  formApi: FormApi,\n  body: Record<string, 
string[]>,\n  phase: keyof ValidationErrorMap,\n) {\n  const unhandled: Record<string, string[]> = {};\n  Object.entries(body).forEach(([key, value]) => {\n    if (key in formApi.fieldInfo) {\n      const field = formApi.fieldInfo[key]?.instance;\n      if (field) {\n        field.setErrorMap({ [phase]: value });\n        return;\n      }\n    }\n    unhandled[key] = value;\n  });\n  if (Object.keys(unhandled).length > 0) {\n    return unhandled;\n  } else {\n    return undefined;\n  }\n}"
  },
  {
    "path": "frontend/app/src/components/form/widgets/CodeInput.tsx",
    "content": "import type { FormControlWidgetProps } from '@/components/form/control-widget';\nimport { useActiveTheme } from '@/components/use-active-theme';\nimport { cn } from '@/lib/utils';\nimport { Loader2Icon } from 'lucide-react';\nimport mergeRefs from 'merge-refs';\nimport type * as monaco from 'monaco-editor';\nimport { forwardRef, useEffect, useRef, useState } from 'react';\n\nexport interface CodeInputProps extends FormControlWidgetProps<string> {\n  className?: string;\n  placeholder?: string;\n  language: 'json' | 'markdown';\n}\n\nexport const CodeInput = forwardRef<any, CodeInputProps>(({\n  id,\n  name,\n  language,\n  className,\n  value,\n  onChange,\n  onBlur,\n  disabled,\n  placeholder,\n  'aria-describedby': ariaDescribedBy,\n  'aria-invalid': ariaInvalid,\n}, forwardedRef) => {\n  const monacoRef = useRef<typeof monaco>(undefined);\n  const [editor, setEditor] = useState<monaco.editor.IStandaloneCodeEditor | undefined>(undefined);\n\n  // useImperativeHandle(forwardedRef, () => editor, [editor]);\n\n  const theme = useActiveTheme();\n\n  const ref = useRef<HTMLDivElement>(null);\n\n  useEffect(() => {\n    if (ref) {\n      const ac = new AbortController();\n      let editor: monaco.editor.IStandaloneCodeEditor | undefined;\n\n      import('monaco-editor').then(monaco => {\n        monacoRef.current = monaco;\n        if (ac.signal.aborted) return;\n\n        editor = monaco.editor.create(ref.current!, {\n          value: value,\n          language,\n          automaticLayout: true,\n          lineNumbers: 'off',\n          glyphMargin: false,\n          lineDecorationsWidth: 0,\n          lineNumbersMinChars: 2,\n          scrollBeyondLastLine: false,\n          minimap: {\n            enabled: false,\n          },\n          tabSize: 2,\n          theme: theme === 'dark' ? 
'vs-dark' : undefined,\n        });\n\n        setEditor(editor);\n      });\n\n      return () => {\n        ac.abort();\n        editor?.dispose();\n      };\n    }\n  }, []);\n\n  useEffect(() => {\n    if (editor && onBlur) {\n      const { dispose } = editor.onDidBlurEditorText(onBlur);\n      return dispose;\n    }\n  }, [editor, onBlur]);\n\n  useEffect(() => {\n    if (editor && onChange) {\n      const { dispose } = editor.onDidChangeModelContent(() => onChange(editor.getValue()));\n      return dispose;\n    }\n  }, [editor, onChange]);\n\n  useEffect(() => {\n    if (editor) {\n      editor.updateOptions({\n        lineNumbers: disabled ? 'off' : 'on',\n        lineDecorationsWidth: disabled ? 0 : undefined,\n        readOnly: disabled,\n      });\n    }\n  }, [editor, disabled]);\n\n  useEffect(() => {\n    if (editor) {\n      const rValue = value || '';\n      if (rValue !== editor.getValue()) {\n        editor.setValue(rValue);\n      }\n    }\n  }, [editor, value]);\n\n  useEffect(() => {\n    if (theme === 'dark') {\n      editor?.updateOptions({\n        theme: 'vs-dark',\n      });\n    } else {\n      editor?.updateOptions({\n        theme: 'vs',\n      });\n    }\n  }, [theme]);\n\n  useEffect(() => {\n    if (editor && placeholder && monacoRef.current) {\n      const monaco = monacoRef.current;\n\n      /**\n       * Represents a placeholder renderer for monaco editor\n       * Roughly based on https://github.com/microsoft/vscode/blob/main/src/vs/workbench/contrib/codeEditor/browser/untitledTextEditorHint/untitledTextEditorHint.ts\n       */\n      class PlaceholderContentWidget implements monaco.editor.IContentWidget {\n        private static readonly ID = 'editor.widget.placeholderHint';\n\n        private domNode: HTMLElement | undefined;\n\n        constructor (\n          private readonly placeholder: string,\n          private readonly editor: monaco.editor.ICodeEditor,\n        ) {\n          // register a listener for editor code 
changes\n          // ensure that on initial load the placeholder is shown\n          this.onDidChangeModelContent();\n        }\n\n        private onDidChangeModelContent (): void {\n          if (this.editor.getValue() === '') {\n            this.editor.addContentWidget(this);\n          } else {\n            this.editor.removeContentWidget(this);\n          }\n        }\n\n        getId (): string {\n          return PlaceholderContentWidget.ID;\n        }\n\n        getDomNode (): HTMLElement {\n          if (!this.domNode) {\n            this.domNode = document.createElement('div');\n            this.domNode.className = 'text-muted-foreground whitespace-pre-wrap text-italic opacity-70';\n            this.domNode.style.width = 'max-content';\n            this.domNode.style.pointerEvents = 'none';\n            this.domNode.textContent = this.placeholder;\n            this.editor.applyFontInfo(this.domNode);\n          }\n\n          return this.domNode;\n        }\n\n        getPosition (): monaco.editor.IContentWidgetPosition | null {\n          return {\n            position: { lineNumber: 1, column: 1 },\n            preference: [monaco.editor.ContentWidgetPositionPreference.EXACT],\n          };\n        }\n      }\n\n      const widget = new PlaceholderContentWidget(placeholder, editor);\n      const { dispose } = editor.onDidChangeModelContent(() => {\n        if (editor.getValue() === '') {\n          editor.addContentWidget(widget);\n        } else {\n          editor.removeContentWidget(widget);\n        }\n      });\n\n      return () => {\n        dispose();\n      };\n    }\n  }, [editor, placeholder]);\n\n  return (\n    <>\n      <div\n        id={id}\n        className={cn('border rounded w-full min-h-48', className, !editor && 'hidden')}\n        ref={mergeRefs(ref, forwardedRef)}\n        aria-describedby={ariaDescribedBy}\n        aria-invalid={ariaInvalid}\n      />\n      {!editor && <div className=\"flex text-xs 
text-muted-foreground\">Initializing code editor... <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" /></div>}\n      <input className=\"hidden\" value={value} readOnly name={name} />\n    </>\n  );\n});\n"
  },
  {
    "path": "frontend/app/src/components/form/widgets/FileInput.tsx",
    "content": "import type { FormControlWidgetProps } from '@/components/form/control-widget';\nimport { Button } from '@/components/ui/button';\nimport { cn } from '@/lib/utils';\nimport { useId } from 'react';\n\nexport interface FileInputProps extends FormControlWidgetProps<File, true> {\n  accept: string[];\n}\n\nexport const FileInput = ({\n  name,\n  accept,\n  value,\n  onChange,\n  disabled,\n  ref,\n  onBlur,\n  ...props\n}: FileInputProps) => {\n  const id = useId();\n  return (\n    <>\n      <input\n        className=\"hidden\"\n        id={id}\n        name={name}\n        type=\"file\"\n        accept={accept.join(', ')}\n        onChange={event => {\n          const file = event.target.files?.item(0) ?? undefined;\n          onChange?.(file);\n        }}\n        disabled={disabled}\n      />\n      <Button\n        variant=\"outline\"\n        disabled={disabled}\n        ref={ref}\n        onBlur={onBlur}\n        {...props}\n        className={cn('flex w-full justify-start font-normal', value == null && 'text-muted-foreground')}\n        onClick={(event) => {\n          (props as any).onClick?.(event);\n          if (!event.defaultPrevented) {\n            document.getElementById(id)?.click();\n          }\n        }}\n        type=\"button\"\n      >\n        {value\n          ? value.name\n          : 'Select file'}\n      </Button>\n    </>\n  );\n};\n"
  },
  {
    "path": "frontend/app/src/components/form/widgets/FilesInput.tsx",
    "content": "import { DataTable } from '@/components/data-table';\nimport { DataTableHeading } from '@/components/data-table-heading';\nimport type { FormControlWidgetProps } from '@/components/form/control-widget';\nimport { Button } from '@/components/ui/button';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { filesize } from 'filesize';\nimport { FileMinus2Icon } from 'lucide-react';\nimport { type ChangeEvent, forwardRef, useId } from 'react';\n\nexport interface FilesInputProps extends FormControlWidgetProps<File[]> {\n  accept: string[];\n}\n\nconst helper = createColumnHelper<File>();\n\nexport const FilesInput = forwardRef<any, FilesInputProps>(({\n  accept,\n  name,\n  id,\n  disabled,\n  onBlur,\n  value: files = [],\n  onChange: onFilesChange,\n  ...props\n}, ref) => {\n  const hookId = useId();\n  id = id ?? hookId;\n\n  const columns: ColumnDef<File, any>[] = [\n    helper.accessor('name', {}),\n    helper.accessor('type', {}),\n    helper.accessor('size', { cell: cell => filesize(cell.getValue()) }),\n    helper.display({\n      id: 'op',\n      cell: (cell) => <Button\n        type=\"button\"\n        variant=\"ghost\"\n        size=\"sm\"\n        className='text-xs'\n        onClick={() => {\n          files = [...files];\n          files.splice(cell.row.index, 1);\n          onFilesChange?.(files);\n        }}\n      >\n        <FileMinus2Icon className=\"size-4 mr-1\" />\n        Remove\n      </Button>,\n    }),\n  ];\n\n  const handleSelectFiles = (ev: ChangeEvent<HTMLInputElement>) => {\n    ev.preventDefault();\n    if (ev.target.files) {\n      const newFiles = Array.from(ev.target.files);\n      onFilesChange?.([...files, ...newFiles]);\n    }\n  };\n\n  return (\n    <>\n      <DataTable<File, any>\n        classNames={{\n          td: 'px-2 py-1',\n        }}\n        before={\n          <DataTableHeading>\n            <input\n              
className=\"hidden\"\n              id={id}\n              name={name}\n              type=\"file\"\n              multiple\n              accept={accept.join(', ')}\n              onChange={handleSelectFiles}\n              disabled={disabled}\n            />\n            <Button\n              variant=\"secondary\"\n              disabled={disabled}\n              ref={ref}\n              onBlur={onBlur}\n              {...props}\n              onClick={(event) => {\n                (props as any).onClick?.(event);\n                if (!event.defaultPrevented) {\n                  document.getElementById(id)?.click();\n                }\n              }}\n              type=\"button\"\n            >\n              Select files...\n            </Button>\n          </DataTableHeading>\n        }\n        columns={columns}\n        data={files}\n        hideHeader\n      />\n    </>\n  );\n});\n\nFilesInput.displayName = 'FilesInput';\n"
  },
  {
    "path": "frontend/app/src/components/form/widgets/PromptInput.tsx",
    "content": "import { type FormControlWidgetProps, FormTextarea } from '@/components/form/control-widget';\nimport { buttonVariants } from '@/components/ui/button';\nimport { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { cn } from '@/lib/utils';\nimport { forwardRef } from 'react';\n\nexport interface PromptInputProps extends FormControlWidgetProps<string> {\n  className?: string;\n}\n\nexport const PromptInput = forwardRef<any, PromptInputProps>(({ className, ...props }: PromptInputProps, ref) => {\n  return (\n    <Dialog>\n      <DialogTrigger ref={ref} className={cn(buttonVariants({ variant: 'outline' }), 'flex gap-1 w-full font-normal', className)}>\n        {'Edit prompt'}\n        <span className=\"text-muted-foreground\">({props.value?.length} characters)</span>\n      </DialogTrigger>\n      <DialogContent className=\"h-2/3\">\n        <DialogHeader className=\"sr-only\">\n          <DialogTitle>Update Prompt</DialogTitle>\n          <DialogDescription />\n        </DialogHeader>\n        <FormTextarea {...props} />\n      </DialogContent>\n    </Dialog>\n  );\n});\n\nPromptInput.displayName = 'PromptInput';\n"
  },
  {
    "path": "frontend/app/src/components/form-sections.tsx",
    "content": "import type { FieldApi } from '@tanstack/react-form';\nimport { createContext, type Dispatch, type ReactNode, type SetStateAction, useContext, useEffect, useState } from 'react';\n\ntype FieldsMap = Map<string, Map<string, FieldApi<any, any>>>;\ntype FormSectionsContextValues = readonly [FieldsMap, Dispatch<SetStateAction<FieldsMap>>];\nconst FormSectionsContext = createContext<FormSectionsContextValues | undefined>(undefined);\n\nconst EMPTY_SET = new Map<string, FieldApi<any, any>>();\n\nexport function FormSectionsProvider ({ children }: { children?: ReactNode }) {\n  const context = useState<Map<string, Map<string, FieldApi<any, any>>>>(() => new Map());\n  return (\n    <FormSectionsContext value={context}>\n      {children}\n    </FormSectionsContext>\n  );\n}\n\nexport function useFormSectionFields (section: string): ReadonlyMap<string, FieldApi<any, any>> {\n  const [map] = useContext(FormSectionsContext) ?? [];\n  return map?.get(section) ?? EMPTY_SET;\n}\n\ninterface FormSectionContextValues {\n  register (field: FieldApi<any, any>): () => void;\n}\n\nconst FormSectionContext = createContext<FormSectionContextValues>({\n  register (field: FieldApi<any, any>): () => void {\n    return () => {};\n  },\n});\n\nexport function FormSection ({ value, children }: { value: string, children?: ReactNode }) {\n  const [_, setMap] = useContext(FormSectionsContext) ?? 
[];\n\n  const register = (field: FieldApi<any, any>) => {\n    setMap?.(map => {\n      map = new Map(map);\n      const fieldMap = new Map(map.get(value));\n      map.set(value, fieldMap);\n\n      fieldMap.set(field.name, field);\n\n      return map;\n    });\n\n    return () => {\n      setMap?.(map => {\n        if (!map.get(value)?.has(field.name)) {\n          return map;\n        }\n        const fieldMap = new Map(map.get(value));\n        map.set(value, fieldMap);\n\n        fieldMap.delete(field.name);\n\n        return map;\n      });\n    };\n  };\n\n  return (\n    <FormSectionContext value={{ register }}>\n      {children}\n    </FormSectionContext>\n  );\n}\n\nexport function useRegisterFieldInFormSection (field: FieldApi<any, any, any, any>) {\n  const { register } = useContext(FormSectionContext);\n  useEffect(() => {\n    return register(field);\n  }, [field]);\n}\n"
  },
  {
    "path": "frontend/app/src/components/graph/GraphCreateEntity.tsx",
    "content": "import { createSynopsisEntity, type KnowledgeGraphEntity } from '@/api/graph';\nimport { SearchEntity } from '@/components/graph/components/SearchEntity';\nimport { SearchEntityById } from '@/components/graph/components/SearchEntityById';\nimport { useEntities } from '@/components/graph/selectEntities';\nimport { Loader } from '@/components/loader';\nimport { Badge } from '@/components/ui/badge';\nimport { Button } from '@/components/ui/button';\nimport { Form, FormControl, FormField, FormItem, FormLabel, FormMessage } from '@/components/ui/form';\nimport { Input } from '@/components/ui/input';\nimport { Label } from '@/components/ui/label';\nimport { Textarea } from '@/components/ui/textarea';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { cn } from '@/lib/utils';\nimport { zodResolver } from '@hookform/resolvers/zod';\nimport { Loader2Icon, Maximize2Icon } from 'lucide-react';\nimport type monaco from 'monaco-editor';\nimport { lazy, type ReactNode, Suspense, useRef } from 'react';\nimport { useForm } from 'react-hook-form';\nimport z from 'zod';\n\nconst JsonEditor = lazy(() => import('./components/JsonEditor').then(res => ({ default: res.JsonEditor })));\n\n/**\n * @deprecated\n */\nexport function GraphCreateEntity ({ className, knowledgeBaseId, onCreated }: { className?: string, knowledgeBaseId: number, onCreated: (entity: KnowledgeGraphEntity) => void }) {\n  const useEntitiesReturns = useEntities();\n  const { clearSelection, ...useEntitiesRequired } = useEntitiesReturns;\n  const { selectedEntities } = useEntitiesRequired;\n\n  return (\n    <div className=\"space-y-4\">\n      <div className=\"lg:max-w-[50vw]\">\n        <CreateEntityForm\n          entities={selectedEntities}\n          onSubmit={async (data) => {\n            const createdEntity = await createSynopsisEntity(knowledgeBaseId, data);\n            onCreated(createdEntity);\n          }}\n          
onClearSelection={clearSelection}\n          afterEntities={(\n            <>\n              <SearchEntity knowledgeBaseId={knowledgeBaseId} {...useEntitiesRequired} />\n              <SearchEntityById knowledgeBaseId={knowledgeBaseId} {...useEntitiesRequired} />\n            </>\n          )}\n        />\n      </div>\n    </div>\n  );\n}\n\nfunction CreateEntityForm ({ className, entities, onSubmit, onClearSelection, afterEntities }: { className?: string, entities: KnowledgeGraphEntity[], onSubmit: (data: z.infer<typeof createEntitySchema> & { meta: any, entities: number[] }) => Promise<void>, onClearSelection: (id?: number) => void, afterEntities?: ReactNode }) {\n  const form = useForm<z.infer<typeof createEntitySchema>>({\n    resolver: zodResolver(createEntitySchema),\n    defaultValues: {\n      name: '',\n      description: '',\n      topic: '',\n    },\n  });\n  const metaRef = useRef<monaco.editor.IStandaloneCodeEditor | undefined | null>(null);\n\n  const handleSubmit = form.handleSubmit(async values => onSubmit({\n    ...values,\n    meta: JSON.parse(metaRef.current!.getValue()),\n    entities: entities.map(entity => Number(entity.id)),\n  }));\n\n  const containerRef = useRef<HTMLDivElement>(null);\n\n  const handleClickFullscreen = () => {\n    containerRef.current?.requestFullscreen();\n  };\n\n  return (\n    <Form {...form}>\n      <form className={cn('space-y-4', className)} onSubmit={handleSubmit}>\n        <h2 className=\"font-bold text-xl\">Create synopsis entity</h2>\n        <FormField\n          name=\"name\"\n          render={({ field }) => (\n            <FormItem>\n              <FormLabel>Name</FormLabel>\n              <FormControl>\n                <Input {...field} />\n              </FormControl>\n              <FormMessage />\n            </FormItem>\n          )}\n        />\n        <FormField\n          name=\"description\"\n          render={({ field }) => (\n            <FormItem>\n              
<FormLabel>Description</FormLabel>\n              <FormControl>\n                <Textarea {...field} />\n              </FormControl>\n              <FormMessage />\n            </FormItem>\n          )}\n        />\n        <FormField\n          name=\"topic\"\n          render={({ field }) => (\n            <FormItem>\n              <FormLabel>Topic</FormLabel>\n              <FormControl>\n                <Input {...field} />\n              </FormControl>\n              <FormMessage />\n            </FormItem>\n          )}\n        />\n        <FormItem>\n          <div className=\"flex items-center justify-between\">\n            <Label>\n              Meta\n            </Label>\n            <button className=\"text-foreground/50 hover:text-foreground transition-colors\" onMouseDown={handleClickFullscreen}>\n              <Maximize2Icon className=\"w-3 h-3\" />\n            </button>\n          </div>\n          <FormControl>\n            <div className=\"relative w-full h-32 border\" ref={containerRef}>\n              <Suspense fallback={<Loader loading>Initializing JSON editor...</Loader>}>\n                <JsonEditor defaultValue=\"{}\" ref={metaRef} />\n              </Suspense>\n            </div>\n          </FormControl>\n        </FormItem>\n        <FormItem>\n          <Label>Entities</Label>\n          <TooltipProvider>\n            <div className=\"flex gap-2 flex-wrap\">\n              {entities.map(entity => (\n                <Tooltip key={entity.id}>\n                  <TooltipTrigger type=\"button\">\n                    <Badge key={entity.id} variant=\"secondary\">{entity.name} #{entity.id}</Badge>\n                  </TooltipTrigger>\n                  <TooltipContent className=\"space-y-2 w-[360px]\">\n                    <h3 className=\"font-bold\">{entity.name} #{entity.id}</h3>\n                    <p className=\"text-xs text-accent-foreground\">{entity.description}</p>\n                    <Button variant=\"secondary\" 
className=\"w-full mt-4\" size=\"sm\" onClick={() => onClearSelection(Number(entity.id))}>Remove from entities</Button>\n                  </TooltipContent>\n                </Tooltip>\n              ))}\n            </div>\n          </TooltipProvider>\n          <div className=\"grid grid-cols-3 gap-2\">\n            {afterEntities}\n            <Button variant=\"ghost\" onClick={() => onClearSelection()}>\n              Clear Selection\n            </Button>\n          </div>\n        </FormItem>\n        <div className=\"!mt-8\">\n          <Button type=\"submit\" disabled={form.formState.disabled}>\n            {form.formState.isSubmitting && <Loader2Icon className=\"w-4 h-4 mr-2 animate-spin repeat-infinite\" />}\n            Create Entity\n          </Button>\n        </div>\n      </form>\n    </Form>\n  );\n}\n\nconst createEntitySchema = z.object({\n  name: z.string().min(1).regex(/\\S/),\n  description: z.string().min(1).regex(/\\S/),\n  topic: z.string().min(1).regex(/\\S/),\n});\n"
  },
  {
    "path": "frontend/app/src/components/graph/GraphEditor.tsx",
    "content": "'use client';\n\nimport { getChatMessageSubgraph } from '@/api/chats';\nimport { getEntitySubgraph, streamEntireKnowledgeGraph, type KnowledgeGraph, search } from '@/api/graph';\nimport { LinkDetails } from '@/components/graph/components/LinkDetails';\nimport { NetworkViewer, type NetworkViewerDetailsProps } from '@/components/graph/components/NetworkViewer';\nimport { NodeDetails } from '@/components/graph/components/NodeDetails';\nimport type { IdType } from '@/components/graph/network/Network';\nimport { useNetwork } from '@/components/graph/useNetwork';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { buttonVariants } from '@/components/ui/button';\nimport { Input } from '@/components/ui/input';\nimport { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';\nimport { useSearchParam } from '@/components/use-search-param';\nimport { getErrorMessage } from '@/lib/errors';\nimport isHotkey from 'is-hotkey';\nimport Link from 'next/link';\nimport { useEffect, useRef, useState } from 'react';\nimport { createPortal } from 'react-dom';\nimport useSWR from 'swr';\n\nexport function GraphEditor ({ knowledgeBaseId }: { knowledgeBaseId: number }) {\n  const [query, setQuery] = useSearchParam('query', 'sample-question:What is TiDB?');\n\n  const [key, fetcher] = getFetchInfo(knowledgeBaseId, query);\n\n  const { data: span, isLoading, error } = useSWR(key, fetcher, { revalidateOnFocus: false });\n\n  const network = useNetwork(span);\n\n  const ref = useRef<HTMLDivElement>(null);\n\n  return (\n    <div className=\"p-4 space-y-4\">\n      <SubgraphSelector knowledgeBaseId={knowledgeBaseId} query={query} onQueryChange={setQuery} />\n      {(error != null) && <Alert variant=\"destructive\">\n        <AlertTitle>Failed to fetch subgraph</AlertTitle>\n        <AlertDescription>{getErrorMessage(error)}</AlertDescription>\n      </Alert>}\n      <div className=\"w-full flex 
gap-4\">\n        <div className=\"flex-1\">\n          <NetworkViewer\n            key={query}\n            className=\"border rounded h-auto aspect-square\"\n            loading={isLoading}\n            loadingTitle={'Loading knowledge graph...'}\n            network={network}\n            Details={(props) => (\n              ref.current && createPortal(\n                <Editor\n                  knowledgeBaseId={knowledgeBaseId}\n                  {...props}\n                  onEnterSubgraph={(type, id) => {\n                    props.onTargetChange(undefined);\n                    switch (type) {\n                      case `entity`:\n                        setQuery(`entity:${id}`);\n                        break;\n                      case 'document':\n                        setQuery(`document:${id}`);\n                        break;\n                    }\n                  }}\n                />,\n                ref.current!,\n              )\n            )}\n          />\n        </div>\n        <div className=\"w-96 flex-shrink-0 relative\" style={{ padding: '0.1px' }} ref={ref} />\n      </div>\n    </div>\n  );\n}\n\nfunction SubgraphSelector ({ knowledgeBaseId, query, onQueryChange }: { knowledgeBaseId: number, query: string | null, onQueryChange: (query: string) => void }) {\n  const [initialType = 'sample-question', initialInput = 'What is TiDB?'] = parseQuery(query) ?? [];\n\n  const [type, setType] = useState<string>(initialType);\n  const [input, setInput] = useState<string>(initialInput);\n\n  useEffect(() => {\n    const [type = 'sample-question', input = 'What is TiDB?'] = parseQuery(query) ?? 
[];\n    setType(type);\n    setInput(input);\n  }, [query]);\n\n  return (\n    <div className=\"flex gap-2\">\n      <Select value={type} onValueChange={type => {\n        setType(type);\n        setInput('');\n        if (type === 'entire-knowledge-graph') {\n          onQueryChange(`${type}:`);\n        }\n      }}>\n        <SelectTrigger className=\"w-max\">\n          <SelectValue />\n        </SelectTrigger>\n        <SelectContent>\n          <SelectItem value=\"sample-question\">Sample Question</SelectItem>\n          <SelectItem value=\"entity\">Entity ID</SelectItem>\n          <SelectItem value=\"message-subgraph\">Message Subgraph</SelectItem>\n          <SelectItem value=\"trace\" disabled>Langfuse Trace ID (UUID)</SelectItem>\n          <SelectItem value=\"document\" disabled>Document URI</SelectItem>\n          <SelectItem value=\"entire-knowledge-graph\">Entire Knowledge Graph</SelectItem>\n        </SelectContent>\n      </Select>\n      {type !== 'entire-knowledge-graph' && (\n        <>\n          <Input\n            className=\"flex-1\"\n            value={input}\n            onChange={event => setInput(event.target.value)}\n            onKeyDown={event => {\n              if (isHotkey('Enter', event)) {\n                onQueryChange(`${type}:${input}`);\n              }\n            }}\n          />\n        </>\n      )}\n      <Link className={buttonVariants({})} href={`/knowledge-bases/${knowledgeBaseId}/knowledge-graph-explorer/create-synopsis-entity`}>\n        Create Synopsis Entity\n      </Link>\n    </div>\n  );\n}\n\nfunction Editor ({ knowledgeBaseId, network, target, onTargetChange, onEnterSubgraph }: NetworkViewerDetailsProps & { knowledgeBaseId: number, onEnterSubgraph: (type: string, entityId: IdType) => void }) {\n  if (target) {\n    if (target.type === 'link') {\n      return <LinkDetails knowledgeBaseId={knowledgeBaseId} relationship={network.link(target.id)!} onClickTarget={onTargetChange} 
onEnterSubgraph={onEnterSubgraph} />;\n    } else if (target.type === 'node') {\n      return <NodeDetails knowledgeBaseId={knowledgeBaseId} entity={network.node(target.id)!} onClickTarget={onTargetChange} onEnterSubgraph={onEnterSubgraph} />;\n    }\n  }\n\n  return <div className=\"flex items-center justify-center h-40 text-sm text-muted-foreground font-bold\">\n    Select an entity or relationship\n  </div>;\n}\n\nfunction getFetchInfo (kbId: number, query: string | null): [string | false, () => Promise<KnowledgeGraph>] {\n  if (!query) {\n    return [false, () => Promise.reject()];\n  }\n\n  const parsedQuery = parseQuery(query);\n  if (!parsedQuery) {\n    return [false, () => Promise.reject()];\n  }\n\n  const param = parsedQuery[1];\n\n\n  switch (parsedQuery[0]) {\n    // case 'trace':\n    //   return ['get', `/api/v1/traces/${parsedQuery[1]}/knowledge-graph-retrieval`];\n    // case 'document':\n    //   return ['get', `/api/v1/indexes/${indexName}/chunks/${encodeURIComponent(parsedQuery[1])}/subgraph`];\n    case 'entity':\n      return [`api.knowledge-bases.${kbId}.graph.entity-subgraph?id=${param}`, () => getEntitySubgraph(kbId, parseInt(param))];\n    case 'sample-question':\n      return [`api.knowledge-bases.${kbId}.graph.search?query=${param}`, () => search(kbId, { query: param })];\n    case 'message-subgraph':\n      return [`api.chats.get-message-subgraph?id=${param}`, () => getChatMessageSubgraph(parseInt(param))];\n    case 'entire-knowledge-graph':\n      return [`api.knowledge-bases.${kbId}.graph.entire-knowledge-graph`, () => streamEntireKnowledgeGraph(kbId)];\n  }\n\n  return [false, () => Promise.reject()];\n}\n\nfunction parseQuery (query: string | null) {\n  if (!query) {\n    return null;\n  }\n\n  const idx = query.indexOf(':');\n  if (idx < 0) {\n    return null;\n  }\n\n  return [query.slice(0, idx), query.slice(idx + 1)] as const;\n}\n"
  },
  {
    "path": "frontend/app/src/components/graph/action.ts",
    "content": "import { useState } from 'react';\n\ntype ActionStatus<T> = {\n  pending: true\n  loading: false\n  data: undefined\n  error: undefined\n} | {\n  pending: false\n  loading: true\n  data: undefined\n  error: undefined\n} | {\n  pending: false\n  loading: false\n  data: undefined\n  error: Error\n} | {\n  pending: false\n  loading: false\n  data: T\n  error: undefined\n}\n\nexport function useAction<T> (action: () => Promise<T>) {\n  const [pending, setPending] = useState(true);\n  const [loading, setLoading] = useState(false);\n  const [data, setData] = useState<T | undefined>(undefined);\n  const [error, setError] = useState(undefined);\n\n  return {\n    pending,\n    loading,\n    data,\n    error,\n    run () {\n      if (!pending) {\n        return Promise.reject('');\n      }\n      setPending(false);\n      setLoading(true);\n      setError(undefined);\n      setData(undefined);\n      return action()\n        .then(data => {\n          setData(data);\n          return data;\n        }, error => {\n          setError(error);\n          return Promise.reject(error);\n        })\n        .finally(() => {\n          setLoading(false);\n          setPending(true);\n        });\n    },\n    reset () {\n      setPending(true);\n      setLoading(false);\n      setError(undefined);\n      setData(undefined);\n    },\n  } as ActionStatus<T> & {\n    run: () => Promise<T>,\n    reset: () => void,\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/graph/components/EditingButton.tsx",
    "content": "import { Button } from '@/components/ui/button';\nimport { ExternalLinkIcon, Loader2Icon, PencilIcon } from 'lucide-react';\n\nexport interface EditingButtonProps {\n  editing: boolean;\n  onEnterSubgraph?: () => void;\n  subGraphTitle?: string;\n  onStartEdit: () => void;\n  onSave: () => void;\n  onReset: () => void;\n  busy: boolean;\n}\n\nexport function EditingButton ({ onEnterSubgraph, subGraphTitle = 'Subgraph', editing, onStartEdit, onReset, onSave, busy }: EditingButtonProps) {\n  return editing\n    ? (\n      <div className=\"flex gap-2 items-center\">\n        <Button\n          size=\"sm\"\n          disabled={busy}\n          onClick={onSave}>\n          {busy && <Loader2Icon className=\"w-3 h-3 mr-1 animate-spin repeat-infinite\" />}\n          Save\n        </Button>\n        <Button\n          size=\"sm\"\n          variant=\"ghost\"\n          disabled={busy}\n          onClick={onReset}>\n          exit\n        </Button>\n      </div>\n    )\n    : (\n      <div className=\"flex gap-1 items-center\">\n        <Button size=\"sm\" variant=\"default\" onClick={onStartEdit}>\n          <PencilIcon className=\"w-3 h-3 mr-2\" />\n          Edit\n        </Button>\n        {onEnterSubgraph && <Button size=\"sm\" variant=\"secondary\" onClick={onEnterSubgraph}>\n          <ExternalLinkIcon className=\"w-3 h-3 mr-2\" />\n          {subGraphTitle}\n        </Button>}\n      </div>\n    );\n}"
  },
  {
    "path": "frontend/app/src/components/graph/components/EntitiesTable.tsx",
    "content": "import { metadataCell } from '@/components/cells/metadata';\nimport type { Entity } from '@/components/graph/utils';\nimport { Loader } from '@/components/loader';\nimport { Checkbox } from '@/components/ui/checkbox';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from '@/components/ui/table';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { cn } from '@/lib/utils';\nimport { TooltipArrow } from '@radix-ui/react-tooltip';\nimport { type ColumnDef, flexRender, Table as ReactTable } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { AlertTriangleIcon } from 'lucide-react';\n\nexport interface RemoteEntity extends Partial<Omit<Entity, 'id'>> {\n  id: number | string;\n  isLoading?: boolean;\n  error?: unknown;\n}\n\nexport function EntitiesTable ({ className, isLoading, table }: { className?: string, isLoading: boolean, table: ReactTable<RemoteEntity> }) {\n  return (\n    <TooltipProvider>\n      <div className={cn('rounded-md border max-w-full max-h-[360px] overflow-x-hidden relative', className)}>\n        <Loader loading={isLoading}>\n          Searching entities...\n        </Loader>\n        <Table className=\"table-auto text-xs whitespace-nowrap\">\n          <TableHeader>\n            {table.getHeaderGroups().map((headerGroup) => (\n              <TableRow key={headerGroup.id} className=\"sticky top-0\">\n                {headerGroup.headers.map((header) => {\n                  return (\n                    <TableHead key={header.id}>\n                      {header.isPlaceholder\n                        ? 
null\n                        : flexRender(\n                          header.column.columnDef.header,\n                          header.getContext(),\n                        )}\n                    </TableHead>\n                  );\n                })}\n              </TableRow>\n            ))}\n          </TableHeader>\n          <TableBody>\n            {table.getRowModel().rows?.length ? (\n              table.getRowModel().rows.map((row) => (\n                <TableRow\n                  key={row.id}\n                  data-state={row.getIsSelected() && 'selected'}\n                >\n                  {row.getVisibleCells().map((cell) => (\n                    <TableCell key={cell.id}>\n                      {flexRender(cell.column.columnDef.cell, cell.getContext())}\n                    </TableCell>\n                  ))}\n                </TableRow>\n              ))\n            ) : (\n              <TableRow>\n                <TableCell colSpan={entityColumns.length} className=\"h-24 text-center\">\n                  No results.\n                </TableCell>\n              </TableRow>\n            )}\n          </TableBody>\n        </Table>\n      </div>\n    </TooltipProvider>\n  );\n}\n\nconst helper = createColumnHelper<RemoteEntity>();\n\nexport const entityColumns: ColumnDef<RemoteEntity, any>[] = [\n  {\n    id: 'select',\n    header: ({ table }) => (\n      <Checkbox\n        checked={\n          table.getIsAllPageRowsSelected() ||\n          (table.getIsSomePageRowsSelected() && 'indeterminate')\n        }\n        onCheckedChange={(value) => table.toggleAllPageRowsSelected(!!value)}\n        aria-label=\"Select all\"\n      />\n    ),\n    cell: ({ row }) => (\n      <Checkbox\n        checked={row.getIsSelected()}\n        onCheckedChange={(value) => row.toggleSelected(!!value)}\n        aria-label=\"Select row\"\n      />\n    ),\n    enableSorting: false,\n    enableHiding: false,\n  },\n  helper.accessor('id', {}),\n  
helper.accessor('name', {\n    cell: ({ getValue, row }) => {\n      if (row.original.isLoading) {\n        return <Skeleton className=\"w-12 h-4\" />;\n      }\n\n      if (!!row.original.error) {\n        return (\n          <span className=\"text-destructive flex items-center gap-2 text-xs\">\n            <AlertTriangleIcon className=\"w-3 h-3\" />\n            <span>\n              Failed to find entity #{row.id}\n            </span>\n          </span>\n        );\n      }\n\n      return getValue();\n    },\n  }),\n  helper.accessor('description', {\n    cell: ({ getValue, row }) => {\n      if (row.original.isLoading) {\n        return <Skeleton className=\"w-12 h-4\" />;\n      }\n      const maxLength = 80;\n      const description = getValue() ?? '';\n      if (description.length > maxLength) {\n        return (\n          <Tooltip>\n            <TooltipTrigger>\n              {description.slice(0, maxLength - 3) + '...'}\n            </TooltipTrigger>\n            <TooltipContent className=\"max-w-sm break-words text-wrap\" align=\"start\">\n              <TooltipArrow className=\"fill-background\" />\n              <p className=\"text-xs\">\n                {description}\n              </p>\n            </TooltipContent>\n          </Tooltip>\n        );\n      } else {\n        return description;\n      }\n    },\n  }),\n  helper.accessor('meta', { cell: metadataCell }),\n];\n\n"
  },
  {
    "path": "frontend/app/src/components/graph/components/InputField.tsx",
    "content": "import { forwardRef, type InputHTMLAttributes, type ReactNode } from 'react';\n\nexport interface InputFieldProps extends Omit<InputHTMLAttributes<HTMLInputElement>, 'className'> {\n  label: ReactNode;\n}\n\nexport const InputField = forwardRef<HTMLInputElement, InputFieldProps>(({\n  label, ...inputProps\n}, ref) => {\n  return (\n    <section>\n      <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">\n        {label}\n      </h6>\n      <input\n        ref={ref}\n        className=\"block w-full text-xs text-accent-foreground border p-1 bg-card rounded outline-primary\"\n        {...inputProps}\n      />\n    </section>\n  );\n});\n\nInputField.displayName = 'InputField';\n\n"
  },
  {
    "path": "frontend/app/src/components/graph/components/JsonEditor.tsx",
    "content": "import { useActiveTheme } from '@/components/use-active-theme';\nimport * as monaco from 'monaco-editor';\nimport { forwardRef, useEffect, useImperativeHandle, useRef, useState } from 'react';\n\nexport const JsonEditor = forwardRef<monaco.editor.IStandaloneCodeEditor | undefined, { defaultValue: string, disabled?: boolean, id?: string }>(({ disabled, defaultValue, id }, forwardedRef) => {\n  const [editor, setEditor] = useState<monaco.editor.IStandaloneCodeEditor | undefined>(undefined);\n\n  useImperativeHandle(forwardedRef, () => editor, [editor]);\n\n  const theme = useActiveTheme();\n\n  const ref = useRef<HTMLDivElement>(null);\n\n  useEffect(() => {\n    // SOLUTION: instead use dynamic imports on the client side\n    if (ref) {\n      const editor = monaco.editor.create(ref.current!, {\n        value: '',\n        language: 'json',\n        automaticLayout: true,\n        lineNumbers: 'off',\n        tabSize: 2,\n        theme: theme === 'dark' ? 'vs-dark' : undefined,\n      });\n\n      setEditor(editor);\n      return () => editor.dispose();\n    }\n  }, []);\n\n  useEffect(() => {\n    if (editor) {\n      editor.updateOptions({\n        lineNumbers: disabled ? 'off' : 'on',\n        lineDecorationsWidth: disabled ? 0 : undefined,\n        readOnly: disabled,\n      });\n    }\n  }, [editor, disabled]);\n\n  useEffect(() => {\n    if (editor) {\n      if (defaultValue != null) {\n        editor.setValue(defaultValue);\n      }\n    }\n  }, [editor, defaultValue]);\n\n  useEffect(() => {\n    if (theme === 'dark') {\n      editor?.updateOptions({\n        theme: 'vs-dark',\n      });\n    } else {\n      editor?.updateOptions({\n        theme: 'vs',\n      });\n    }\n  }, [theme]);\n\n  return <div className=\"w-full h-full\" id={id} ref={ref} style={{ height: '100%', width: '100%' }}></div>;\n});\n\nJsonEditor.displayName = 'JsonEditor';\n"
  },
  {
    "path": "frontend/app/src/components/graph/components/JsonField.tsx",
    "content": "import { Loader } from '@/components/loader';\nimport { Maximize2Icon } from 'lucide-react';\nimport { editor } from 'monaco-editor';\nimport { forwardRef, lazy, type ReactNode, Suspense, useImperativeHandle, useMemo, useRef } from 'react';\n\nconst JsonEditor = lazy(() => import('./JsonEditor').then(res => ({ default: res.JsonEditor })));\n\nexport interface JsonFieldProps {\n  label: ReactNode;\n  defaultValue?: any;\n  disabled?: boolean;\n}\n\nexport interface JsonFieldInstance {\n  value: any;\n}\n\nexport const JsonField = forwardRef<JsonFieldInstance, JsonFieldProps>(({\n  label,\n  defaultValue,\n  disabled,\n}, ref) => {\n  const editorRef = useRef<editor.IStandaloneCodeEditor | null | undefined>(undefined);\n  const containerRef = useRef<HTMLDivElement>(null);\n\n  const defaultValueString = useMemo(() => {\n    return JSON.stringify(defaultValue, undefined, 2);\n  }, [defaultValue]);\n\n  const handleClickFullscreen = () => {\n    containerRef.current?.requestFullscreen();\n  };\n\n  useImperativeHandle(ref, () => ({\n    get value () {\n      const value = editorRef.current?.getValue();\n      if (value == null) {\n        return undefined;\n      }\n      return JSON.parse(value);\n    },\n    set value (value: any) {\n      editorRef.current?.setValue(JSON.stringify(value, undefined, 2));\n    },\n  }), []);\n\n  return (\n    <section>\n      <h6 className=\"text-xs font-bold text-accent-foreground mb-1 flex items-center justify-between\">\n        {label}\n        <button className=\"text-foreground/50 hover:text-foreground transition-colors\" onClick={handleClickFullscreen}>\n          <Maximize2Icon className=\"w-3 h-3\" />\n        </button>\n      </h6>\n      <div ref={containerRef} className=\"relative bg-card p-1 border rounded w-full h-80 overflow-auto focus-within:outline outline-2 -outline-offset-1 outline-primary\">\n        <Suspense fallback={<Loader loading>Initializing JSON editor...</Loader>}>\n          <JsonEditor\n 
           ref={editorRef}\n            disabled={disabled}\n            defaultValue={defaultValueString}\n          />\n        </Suspense>\n      </div>\n    </section>\n  );\n});\n\nJsonField.displayName = 'JsonField';"
  },
  {
    "path": "frontend/app/src/components/graph/components/LinkDetails.tsx",
    "content": "import { getRelationship } from '@/api/graph';\nimport { Loader } from '@/components/loader';\nimport { toastError, toastSuccess } from '@/lib/ui-error';\nimport { cn } from '@/lib/utils';\nimport { useContext, useEffect, useMemo, useState } from 'react';\nimport { handleServerRelationship, type Relationship } from '../utils';\nimport type { IdType } from '../network/Network';\nimport { useRemote } from '../remote';\nimport { useDirtyRelationship } from '../useDirtyRelationship';\n// import { EditingButton } from './EditingButton';\nimport { InputField } from './InputField';\nimport { JsonField } from './JsonField';\nimport { NetworkContext } from './NetworkContext';\nimport { TextareaField } from './TextareaField';\n\nconst loadRelationship = (kbId: number, id: number) => getRelationship(kbId, id).then(handleServerRelationship);\n\nexport function LinkDetails ({\n  knowledgeBaseId,\n  relationship,\n  onClickTarget,\n  onUpdate,\n  onEnterSubgraph,\n}: {\n  knowledgeBaseId: number,\n  relationship: Relationship,\n  onClickTarget?: (target: { type: string, id: IdType }) => void;\n  onUpdate?: (newRelationship: Relationship) => void;\n  onEnterSubgraph: (type: string, entityId: IdType) => void\n}) {\n  const network = useContext(NetworkContext);\n\n  const { source, target } = useMemo(() => {\n    return {\n      source: network.node(relationship.source)!,\n      target: network.node(relationship.target)!,\n    };\n  }, [network, relationship.source, relationship.target]);\n\n  const [editing, setEditing] = useState(false);\n  const latestData = useRemote(relationship, loadRelationship, knowledgeBaseId, Number(relationship.id));\n  const dirtyRelationship = useDirtyRelationship(knowledgeBaseId, relationship.id);\n\n  relationship = latestData.data;\n\n  const handleSave = () => {\n    void dirtyRelationship.save()\n      .then((newRelationshipData) => {\n        setEditing(false);\n        onUpdate?.(latestData.mutate(prev => Object.assign({}, prev, 
newRelationshipData)));\n        toastSuccess('Successfully saved.');\n      })\n      .catch((error: any) => {\n        toastError('Failed to save relationship', error);\n      });\n  };\n\n  const handleReset = () => {\n    dirtyRelationship.resetSave();\n    dirtyRelationship.reset(relationship);\n    setEditing(false);\n  };\n\n  useEffect(() => {\n    handleReset();\n  }, [latestData.data]);\n  onUpdate?.(latestData.data);\n\n  const busy = dirtyRelationship.saving || latestData.revalidating;\n  const controlsDisabled = !editing || busy;\n\n  return (\n    <div className=\"p-2 space-y-4 h-full overflow-y-auto\">\n      <div className=\"flex items-center justify-between\">\n        <span className=\"text-sm text-muted-foreground font-normal \">\n          <b>#{relationship.id}</b> relationship\n        </span>\n        {/*<EditingButton editing={editing} onStartEdit={() => setEditing(true)} onSave={handleSave} onReset={handleReset} busy={busy} onEnterSubgraph={() => onEnterSubgraph('document', relationship.meta.doc_id)} subGraphTitle=\"Document subgraph\" />*/}\n      </div>\n      {relationship.meta.doc_id && <section>\n        <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">Document URI</h6>\n        <p className=\"block w-full text-xs text-accent-foreground\">\n          <a className=\"underline\" href={relationship.meta.source_uri} target=\"_blank\">{relationship.meta.source_uri}</a>\n        </p>\n      </section>}\n      <TextareaField label=\"Description\" ref={dirtyRelationship.descriptionRef} defaultValue={relationship.description} disabled={controlsDisabled} />\n      <InputField label=\"Weight\" ref={dirtyRelationship.weightRef} defaultValue={relationship.weight} disabled={controlsDisabled} min={0} step={1} type=\"number\" />\n      <JsonField label=\"meta\" ref={dirtyRelationship.metaRef} defaultValue={relationship.meta} disabled={controlsDisabled} />\n      <section className=\"space-y-2\">\n        <h6 className=\"text-xs font-bold 
text-accent-foreground mb-1\">Source</h6>\n        <div className={cn('text-sm cursor-pointer transition-all hover:text-primary', editing && 'pointer-events-none opacity-50')} onClick={() => !editing && onClickTarget?.({ type: 'node', id: source.id })}>{source.name} <span className=\"text-muted-foreground\">#{source.id}</span></div>\n        <p className={cn('text-xs text-accent-foreground max-h-40 overflow-y-auto border p-1 bg-card rounded', editing && 'opacity-50')}>{source.description}</p>\n      </section>\n      <section className=\"space-y-2\">\n        <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">Target</h6>\n        <div className={cn('text-sm cursor-pointer transition-all hover:text-primary', editing && 'pointer-events-none opacity-50')} onClick={() => !editing && onClickTarget?.({ type: 'node', id: target.id })}>{target.name} <span className=\"text-muted-foreground\">#{target.id}</span></div>\n        <p className={cn('text-xs text-accent-foreground max-h-40 overflow-y-auto border p-1 bg-card rounded', editing && 'opacity-50')}>{target.description}</p>\n      </section>\n      <Loader loading={latestData.revalidating}>\n        Loading relationship #{relationship.id}\n      </Loader>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/graph/components/NetworkCanvas.tsx",
    "content": "import type { IdType, NetworkLink, NetworkNode, ReadonlyNetwork } from '../network/Network';\nimport { useEffect, useRef, useState } from 'react';\n\nimport { CanvasNetworkRenderer } from '../network/CanvasNetworkRenderer';\nimport type { NetworkRendererOptions } from '../network/NetworkRendererOptions';\n\nexport interface NetworkCanvasProps<Node extends NetworkNode, Link extends NetworkLink> extends NetworkRendererOptions<Node, Link> {\n  network: ReadonlyNetwork<Node, Link>;\n  target: { type: string, id: IdType } | undefined;\n  className?: string;\n}\n\nexport function NetworkCanvas<Node extends NetworkNode, Link extends NetworkLink> ({ className, network, target, ...options }: NetworkCanvasProps<Node, Link>) {\n  const ref = useRef<HTMLDivElement>(null);\n  const [renderer, setRenderer] = useState<CanvasNetworkRenderer<Node, Link>>();\n\n  useEffect(() => {\n    // Cleanup previous renderer if it exists\n    if (renderer) {\n      renderer.unmount();\n    }\n\n    const newRenderer = new CanvasNetworkRenderer(network, options);\n    \n    if (ref.current) {\n      newRenderer.mount(ref.current);\n    }\n    setRenderer(newRenderer);\n\n    return () => {\n      newRenderer.unmount();\n      setRenderer(undefined);\n    };\n  }, [network]);\n\n  useEffect(() => {\n    if (!renderer) {\n      return;\n    }\n    if (!target) {\n      renderer.blurNode();\n      renderer.blurLink();\n      return;\n    }\n    switch (target.type) {\n      case 'node':\n        renderer.focusNode(target.id);\n        break;\n      case 'link':\n        renderer.focusLink(target.id);\n        break;\n    }\n  }, [target, renderer]);\n\n  return (\n    <div className={className} ref={ref} />\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/graph/components/NetworkContext.ts",
    "content": "import { createContext } from 'react';\nimport type { Entity, Relationship } from '../utils';\nimport { BaseNetwork, type ReadonlyNetwork } from '../network/Network';\n\nexport const NetworkContext = createContext<ReadonlyNetwork<Entity, Relationship>>(new BaseNetwork());\n"
  },
  {
    "path": "frontend/app/src/components/graph/components/NetworkViewer.tsx",
    "content": "import { Loader } from '@/components/loader';\nimport { cn } from '@/lib/utils';\nimport { type FC, type ReactNode, useMemo, useState } from 'react';\nimport { NetworkCanvas } from '../components/NetworkCanvas';\nimport { NetworkContext } from '../components/NetworkContext';\nimport { BaseNetwork, type IdType } from '../network/Network';\nimport type { NetworkRendererOptions } from '../network/NetworkRendererOptions';\nimport { type Entity, type Relationship } from '../utils';\n\nexport interface NetworkViewerProps {\n  className?: string;\n  network: BaseNetwork<Entity, Relationship>;\n  loading: boolean;\n  loadingTitle: ReactNode;\n  Details: FC<NetworkViewerDetailsProps>;\n}\n\nexport interface NetworkViewerDetailsProps {\n  network: BaseNetwork<Entity, Relationship>,\n  target: { type: string, id: IdType } | undefined,\n  onTargetChange: ((target: { type: string, id: IdType } | undefined) => void)\n}\n\nfunction randomPosition (radius: number, kbSpacing: number, kbIndex: number, kbCount: number) {\n  const x = kbIndex * kbSpacing - (kbCount - 1) * kbSpacing / 2;\n  const theta = Math.random() * 2 * Math.PI;\n\n  return {\n    x: x + radius * Math.cos(theta),\n    y: radius * Math.sin(theta),\n  };\n}\n\nexport function NetworkViewer ({ network, loading, loadingTitle, className, Details }: NetworkViewerProps) {\n  const [target, setTarget] = useState<{ type: string, id: IdType }>();\n\n  const knowledgeGraphIndexMap = useMemo(() => {\n    const nodes = network.nodes();\n    const kbIds = Array.from(nodes.reduce((acc, node) => acc.add(node.knowledge_base_id ?? 0), new Set<number>()));\n    kbIds.sort();\n\n    return new Map(kbIds.map((kbId, index) => ([kbId, index])));\n  }, [network]);\n\n  const networkOptions: NetworkRendererOptions<Entity, Relationship> = {\n    showId: true,\n    getNodeInitialAttrs: (node) => {\n      const kbIndex = knowledgeGraphIndexMap.get(node.knowledge_base_id ?? 0) ?? 
0;\n      return randomPosition(20, 100, kbIndex, knowledgeGraphIndexMap.size || 1);\n    },\n    getNodeLabel: node => node.name,\n    getNodeDetails: node => node.description,\n    getNodeRadius: node => Math.pow(Math.log(1 + (network.nodeNeighborhoods(node.id)?.size ?? 0)) / Math.log(2), 2) * 2 + 5,\n    getNodeColor: node => {\n      if (node.entity_type === 'synopsis') {\n        return `hsl(var(--brand1-foreground))`;\n      } else {\n        const kbIndex = knowledgeGraphIndexMap.get(node.knowledge_base_id ?? 0);\n        if (!kbIndex) {\n          return `hsl(var(--primary))`;\n        } else {\n          return `hsl(var(--chart-${kbIndex + 1}))`;\n        }\n      }\n    },\n    getNodeStrokeColor: node => {\n      if (node.entity_type === 'synopsis') {\n        return `hsl(var(--brand1))`;\n      } else {\n        const kbIndex = knowledgeGraphIndexMap.get(node.knowledge_base_id ?? 0);\n        if (!kbIndex) {\n          return `hsl(var(--primary))`;\n        } else {\n          return `hsl(var(--chart-${kbIndex + 1}))`;\n        }\n      }\n    },\n    getNodeLabelColor: node => {\n      if (node.entity_type === 'synopsis') {\n        return `hsl(var(--brand1))`;\n      } else {\n        return `hsl(var(--primary))`;\n      }\n    },\n    getNodeLabelStrokeColor: node => {\n      if (node.entity_type === 'synopsis') {\n        return `hsl(var(--brand1-foreground))`;\n      } else {\n        return `hsl(var(--primary-foreground))`;\n      }\n    },\n    getNodeMeta: node => node.meta,\n    getLinkColor: link => {\n      if (link.meta.relationship_type === 'synopsis') {\n        return `hsl(var(--brand1) / 50%)`;\n      } else {\n        const kbIndex = knowledgeGraphIndexMap.get(link.knowledge_base_id ?? 
0);\n        if (!kbIndex) {\n          return `hsl(var(--primary) / 50%)`;\n        } else {\n          return `hsl(var(--chart-${kbIndex + 1}) / 50%)`;\n        }\n      }\n    },\n    getLinkLabel: link => {\n      const source = network.node(link.source)!;\n      const target = network.node(link.target)!;\n      return link.description\n        .replace(source.name + ' -> ', '')\n        .replace(' -> ' + target.name, '');\n    },\n    getLinkDetails: link => link.description,\n    getLinkMeta: link => link.meta,\n    getLinkLabelColor: (link) => {\n      if (link.meta.relationship_type === 'synopsis') {\n        return `hsl(var(--brand1) / 50%)`;\n      } else {\n        const kbIndex = knowledgeGraphIndexMap.get(link.knowledge_base_id ?? 0);\n        if (!kbIndex) {\n          return `hsl(var(--primary) / 50%)`;\n        } else {\n          return `hsl(var(--chart-${kbIndex + 1}) / 50%)`;\n        }\n      }\n    },\n    getLinkLabelStrokeColor: () => {\n      return `hsl(var(--primary-foreground) / 50%)`;\n    },\n\n    onClickNode: (node) => {\n      setTarget({ type: 'node', id: node.id });\n    },\n    onClickLink: (link) => {\n      setTarget({ type: 'link', id: link.id });\n    },\n    onClickCanvas: () => {\n      setTarget(undefined);\n    },\n  };\n\n  return (\n    <NetworkContext.Provider value={network}>\n      <div className={cn('relative', className)}>\n        <NetworkCanvas\n          className={cn('w-full h-full overflow-hidden')}\n          network={network}\n          target={target}\n          {...networkOptions}\n        />\n        <Details\n          network={network}\n          target={target}\n          onTargetChange={setTarget}\n        />\n        <Loader loading={loading}>\n          {loadingTitle}\n        </Loader>\n      </div>\n    </NetworkContext.Provider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/graph/components/NodeDetails.tsx",
    "content": "import { getEntity } from '@/api/graph';\nimport { Loader } from '@/components/loader';\nimport { toastError, toastSuccess } from '@/lib/ui-error';\nimport { cn } from '@/lib/utils';\nimport { useContext, useEffect, useMemo, useState } from 'react';\nimport type { IdType } from '../network/Network';\nimport { useRemote } from '../remote';\nimport { useDirtyEntity } from '../useDirtyEntity';\nimport { type Entity, handleServerEntity } from '../utils';\nimport { EditingButton } from './EditingButton';\nimport { InputField } from './InputField';\nimport { JsonField } from './JsonField';\nimport { NetworkContext } from './NetworkContext';\nimport { TextareaField } from './TextareaField';\n\nconst loadEntity = (kbId: number, id: number) => getEntity(kbId, id).then(handleServerEntity);\n\nexport function NodeDetails ({\n  knowledgeBaseId,\n  entity,\n  onClickTarget,\n  onUpdate,\n  onEnterSubgraph,\n}: {\n  knowledgeBaseId: number,\n  entity: Entity,\n  onClickTarget?: (target: { type: string, id: IdType }) => void;\n  onUpdate?: (newData: Entity) => void\n  onEnterSubgraph: (type: string, entityId: IdType) => void\n}) {\n  const [editing, setEditing] = useState(false);\n  const network = useContext(NetworkContext);\n\n  const neighbors = useMemo(() => {\n    return Array.from(network.nodeNeighborhoods(entity.id) ?? 
[]).map(id => network.node(id)!);\n  }, [network, entity.id]);\n\n  const latestData = useRemote(entity, loadEntity, knowledgeBaseId, Number(entity.id));\n  const dirtyEntity = useDirtyEntity(knowledgeBaseId, entity.id);\n\n  // dirty set\n  entity = latestData.data;\n\n  const handleSave = () => {\n    void dirtyEntity.save()\n      .then((newEntityData) => {\n        setEditing(false);\n        onUpdate?.(latestData.mutate(prev => Object.assign({}, prev, newEntityData)));\n        toastSuccess('Successfully saved.');\n      })\n      .catch((error: any) => {\n        toastError('Failed to save entity', error);\n      });\n  };\n\n  const handleReset = () => {\n    dirtyEntity.resetSave();\n    dirtyEntity.reset(entity);\n    setEditing(false);\n  };\n\n  useEffect(() => {\n    handleReset();\n    onUpdate?.(latestData.data);\n  }, [latestData.data]);\n\n  const busy = dirtyEntity.saving || latestData.revalidating;\n  const controlsDisabled = !editing || busy;\n\n  return (\n    <div className=\"p-4 space-y-4 h-full overflow-y-auto\">\n      <div className=\"flex items-center justify-between\">\n        <span className=\"text-sm text-muted-foreground font-normal \">\n          <b>#{entity.id}</b> {entity.entity_type} entity\n        </span>\n        <EditingButton onEnterSubgraph={() => onEnterSubgraph('entity', entity.id)} editing={editing} onStartEdit={() => setEditing(true)} onSave={handleSave} onReset={handleReset} busy={busy} />\n      </div>\n      {entity.synopsis_info?.topic && <section>\n        <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">Synopsis topic</h6>\n        <p className=\"block w-full text-xs text-accent-foreground\">\n          {entity.synopsis_info.topic}\n        </p>\n      </section>}\n      <InputField label=\"Name\" ref={dirtyEntity.nameRef} defaultValue={entity.name} disabled={controlsDisabled} />\n      <TextareaField label=\"Description\" ref={dirtyEntity.descriptionRef} defaultValue={entity.description} 
disabled={controlsDisabled} />\n      <JsonField label=\"Meta\" ref={dirtyEntity.metaRef} defaultValue={entity.meta} disabled={controlsDisabled} />\n      <section>\n        <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">Neighborhoods</h6>\n        <ul className={cn('w-full max-h-40 overflow-y-auto bg-card rounded border transition-opacity', editing && 'opacity-50 pointer-events-none')}>\n          {neighbors.map(entity => (\n            <li\n              key={entity.id}\n              className={'text-xs p-1 border-b last-of-type:border-b-0 cursor-pointer hover:text-primary hover:bg-primary/10 transition-colors'}\n              onClick={() => {\n                if (!editing) {\n                  onClickTarget?.({ type: 'node', id: entity.id });\n                }\n              }}\n            >\n              {entity.name}\n              <span className=\"text-muted-foreground\">\n                {' '}#{entity.id}\n              </span>\n            </li>\n          ))}\n        </ul>\n      </section>\n      <Loader loading={latestData.revalidating}>\n        Loading entity #{entity.id}\n      </Loader>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/graph/components/SearchEntity.tsx",
    "content": "import { EntitiesTable, entityColumns } from '@/components/graph/components/EntitiesTable';\nimport { type SearchEntityFilter, type UseEntitiesRequired, useGraphEntitiesTable } from '@/components/graph/selectEntities';\nimport { Button } from '@/components/ui/button';\nimport { Dialog, DialogContent, DialogHeader, DialogPortal, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { Form, FormControl, FormField } from '@/components/ui/form';\nimport { Input } from '@/components/ui/input';\nimport { cn } from '@/lib/utils';\nimport { zodResolver } from '@hookform/resolvers/zod';\nimport isHotkey from 'is-hotkey';\nimport { SearchIcon } from 'lucide-react';\nimport { useForm } from 'react-hook-form';\nimport z from 'zod';\n\nexport interface SearchEntityProps extends UseEntitiesRequired {\n  knowledgeBaseId: number\n}\n\n/**\n * @deprecated\n */\nexport function SearchEntity ({ knowledgeBaseId, ...props }: SearchEntityProps) {\n  const { table, filter, setFilter, isLoading, error } = useGraphEntitiesTable(knowledgeBaseId, {\n    columns: entityColumns,\n    ...props,\n  });\n\n  return (\n    <Dialog>\n      <DialogTrigger asChild>\n        <Button variant='secondary'>\n          <SearchIcon className=\"mr-2 w-3.5 h-3.5\" />\n          Search entities\n        </Button>\n      </DialogTrigger>\n      <DialogPortal>\n        <DialogContent className=\"overflow-x-hidden max-w-screen-md space-y-2\">\n          <DialogHeader>\n            <DialogTitle>\n              Search entities\n            </DialogTitle>\n          </DialogHeader>\n          <TableFilterForm filter={filter} onFilterChange={setFilter} />\n          <EntitiesTable table={table} isLoading={isLoading} />\n        </DialogContent>\n      </DialogPortal>\n    </Dialog>\n  );\n}\n\nfunction TableFilterForm ({ className, filter, onFilterChange, disabled }: { className?: string, filter: SearchEntityFilter, onFilterChange: (filter: SearchEntityFilter) => void, disabled?: boolean 
}) {\n  const form = useForm<SearchEntityFilter>({\n    values: filter,\n    resolver: zodResolver(tableFilterSchema),\n    disabled,\n    defaultValues: {\n      query: '',\n      top_k: undefined,\n    },\n  });\n\n  return (\n    <Form {...form}>\n      <form className={cn('flex gap-2 items-center', className)} onSubmit={event => {\n        event.stopPropagation();\n        event.preventDefault();\n        return false;\n      }}>\n        <FormField\n          name=\"query\"\n          render={({ field }) => (\n            <FormControl>\n              <Input\n                {...field}\n                placeholder=\"Input your query...\"\n                onKeyDown={ev => {\n                  if (isHotkey('Enter', ev)) {\n                    ev.stopPropagation();\n                    ev.preventDefault();\n                    onFilterChange(form.getValues());\n                  }\n                }}\n              />\n            </FormControl>\n          )}\n        />\n      </form>\n    </Form>\n  );\n}\n\nconst tableFilterSchema = z.object({\n  query: z.string(),\n  top_k: z.coerce.number().optional(),\n});"
  },
  {
    "path": "frontend/app/src/components/graph/components/SearchEntityById.tsx",
    "content": "import { EntitiesTable, entityColumns } from '@/components/graph/components/EntitiesTable';\nimport { type UseEntitiesRequired, useGraphEntitiesByIdsTable } from '@/components/graph/selectEntities';\nimport { Button } from '@/components/ui/button';\nimport { Dialog, DialogContent, DialogHeader, DialogPortal, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { Form, FormControl, FormField } from '@/components/ui/form';\nimport { Input } from '@/components/ui/input';\nimport { cn } from '@/lib/utils';\nimport { zodResolver } from '@hookform/resolvers/zod';\nimport isHotkey from 'is-hotkey';\nimport { PlusIcon } from 'lucide-react';\nimport { useForm } from 'react-hook-form';\nimport z from 'zod';\n\nexport interface SearchEntityProps extends UseEntitiesRequired {\n  knowledgeBaseId: number\n}\n\n/**\n * @deprecated\n */\nexport function SearchEntityById ({ knowledgeBaseId, ...props }: SearchEntityProps) {\n  const { table, ids, setIds } = useGraphEntitiesByIdsTable(knowledgeBaseId, {\n    columns: entityColumns,\n    ...props,\n  });\n\n  return (\n    <Dialog>\n      <DialogTrigger asChild>\n        <Button variant='secondary'>\n          <PlusIcon className=\"mr-2 w-3.5 h-3.5\" />\n          Entities by IDs\n        </Button>\n      </DialogTrigger>\n      <DialogPortal>\n        <DialogContent className=\"overflow-x-hidden max-w-screen-md space-y-2\">\n          <DialogHeader>\n            <DialogTitle>\n              Search entities by IDs\n            </DialogTitle>\n          </DialogHeader>\n          <TableFilterForm filter={ids} onFilterChange={setIds} />\n          <EntitiesTable table={table} isLoading={false} />\n        </DialogContent>\n      </DialogPortal>\n    </Dialog>\n  );\n}\n\nfunction TableFilterForm ({ className, filter, onFilterChange, disabled }: { className?: string, filter: number[], onFilterChange: (filter: number[]) => void, disabled?: boolean }) {\n  const form = useForm<{ ids: string }>({\n    values: 
{ ids: filter.map(i => String(i)).join(', ') },\n    resolver: zodResolver(tableFilterSchema),\n    disabled,\n    defaultValues: {\n      ids: '',\n    },\n  });\n\n  const populate = () => {\n    const ids = form.getValues().ids.split(',')\n      .map(s => s.trim())\n      .filter(Boolean)\n      .map(s => parseInt(s))\n      .filter(n => isFinite(n));\n    onFilterChange(ids);\n  };\n\n  return (\n    <Form {...form}>\n      <form className={cn('flex gap-2 items-center', className)} onSubmit={event => {\n        event.stopPropagation();\n        event.preventDefault();\n        return false;\n      }}>\n        <FormField\n          name=\"ids\"\n          render={({ field }) => (\n            <FormControl>\n              <Input\n                {...field}\n                placeholder=\"Input entities ids separated by ','\"\n                onKeyDown={ev => {\n                  if (isHotkey('Enter', ev)) {\n                    ev.stopPropagation();\n                    ev.preventDefault();\n                    populate();\n                  }\n                }}\n              />\n            </FormControl>\n          )}\n        />\n      </form>\n    </Form>\n  );\n}\n\nconst tableFilterSchema = z.object({\n  ids: z.string(),\n});"
  },
  {
    "path": "frontend/app/src/components/graph/components/TextareaField.tsx",
    "content": "import { forwardRef, type ReactNode, type TextareaHTMLAttributes } from 'react';\n\nexport interface TextareaFieldProps extends Omit<TextareaHTMLAttributes<HTMLTextAreaElement>, 'className'> {\n  label: ReactNode;\n}\n\nexport const TextareaField = forwardRef<HTMLTextAreaElement, TextareaFieldProps>(({\n  label, ...textareaProps\n}, ref) => {\n  return (\n    <section>\n      <h6 className=\"text-xs font-bold text-accent-foreground mb-1\">{label}</h6>\n      <textarea\n        ref={ref}\n        className=\"block w-full min-h-40 text-xs text-accent-foreground overflow-y-auto border p-1 bg-card rounded outline-primary\"\n        {...textareaProps}\n      />\n    </section>\n  );\n});\n\nTextareaField.displayName = 'TextareaField';\n"
  },
  {
    "path": "frontend/app/src/components/graph/index.ts",
    "content": ""
  },
  {
    "path": "frontend/app/src/components/graph/network/CanvasNetworkRenderer.ts",
    "content": "import * as d3 from 'd3';\n\nimport type { IdType, NetworkLink, NetworkNode, ReadonlyNetwork } from './Network';\nimport type { SimulationLinkDatum, SimulationNodeDatum } from 'd3';\n\nimport ForceGraph from 'force-graph';\nimport type { NetworkRendererOptions } from './NetworkRendererOptions';\n\nexport interface NetworkNodeView extends SimulationNodeDatum {\n  id: IdType;\n  index: number;\n  radius: number;\n  label?: string;\n  details?: string;\n  meta?: any;\n}\n\nexport interface NetworkLinkView extends SimulationLinkDatum<NetworkNodeView> {\n  id: IdType;\n  index: number;\n  source: NetworkNodeView;\n  target: NetworkNodeView;\n  label?: string;\n  details?: string;\n  meta?: any;\n}\n\nexport class CanvasNetworkRenderer<Node extends NetworkNode, Link extends NetworkLink> {\n  private _el: HTMLElement | undefined;\n  private _graph: any; // ForceGraph instance\n  private _ro: ResizeObserver | undefined;\n\n  private _onUpdateLink: ((id: IdType) => void) | undefined;\n  private _onUpdateNode: ((id: IdType) => void) | undefined;\n\n  private nodes: NetworkNodeView[] = [];\n  private links: NetworkLinkView[] = [];\n\n  // Graph state\n  private selectedNode: NetworkNodeView | null = null;\n  private selectedLink: NetworkLinkView | null = null;\n  private highlightedNodes = new Set<IdType>();\n  private highlightedLinks = new Set<IdType>();\n\n  private readonly linkDefaultDistance = 30;\n  private readonly chargeDefaultStrength = -80;\n  private readonly linkHighlightDistance = 120;\n  private readonly chargeHighlightStrength = -200;\n  private readonly linkDefaultWidth = 1;\n\n  private clustersCalculated = false;\n  \n  private adjacencyMap = new Map<IdType, { connectedNodes: Set<IdType>, connectedLinks: Set<IdType> }>();\n  private adjacencyCalculated = false;\n\n  scale = 1;\n  private initialLayoutComplete = false;\n\n  private viewportBounds = { x0: -Infinity, y0: -Infinity, x1: Infinity, y1: Infinity };\n\n  private colors = {\n    
textColor: '#000000',\n    nodeHighlighted: '#18a0b1',\n    nodeSelected: '#72fefb',\n    linkDefaultColor: '#999999',\n    linkHighlighted: '#18a0b1',\n    linkSelected: '#72fefb'\n  };\n  private zoomLevels = {\n    one: 0.1,\n    two: 0.2,\n    three: 0.3,\n    four: 0.4,\n    five: 0.8,\n  }\n\n  constructor(\n    private network: ReadonlyNetwork<Node, Link>,\n    private options: NetworkRendererOptions<Node, Link>,\n  ) {\n    this.compile(options);\n  }\n\n  private compile(options: NetworkRendererOptions<Node, Link>) {\n    const nodeMap = new Map<IdType, number>();\n    this.nodes = this.network.nodes().map((node, index) => {\n      const nodeRadius = 8;\n      const fontSize = Math.max(8, nodeRadius * 0.3);\n      const label = options.getNodeLabel?.(node) ?? (node as any).name ?? node.id;\n      const labelColor = options.getNodeLabelColor?.(node) ?? this.colors.textColor;\n      \n      nodeMap.set(node.id, index);\n      return {\n        id: node.id,\n        index,\n        radius: nodeRadius, \n        label,\n        details: options.getNodeDetails?.(node),\n        meta: options.getNodeMeta?.(node),\n        fontSize,\n        fontString: `${fontSize}px Sans-Serif`,\n        labelColor,\n        ...options.getNodeInitialAttrs?.(node, index),\n      };\n    });\n    this.links = this.network.links().map((link, index) => ({\n      id: link.id,\n      index,\n      source: this.nodes[nodeMap.get(link.source)!],\n      target: this.nodes[nodeMap.get(link.target)!],\n      label: options.getLinkLabel?.(link),\n      details: options.getLinkDetails?.(link),\n      meta: options.getLinkMeta?.(link),\n    }));\n  }\n\n  private updateViewportBounds() {\n    if (!this._graph || !this._el) return;\n    \n    const canvas = this._el.querySelector('canvas');\n    if (!canvas) return;\n\n    const width = canvas.width;\n    const height = canvas.height;\n    \n    const topLeft = this._graph.screen2GraphCoords(0, 0);\n    const bottomRight = 
this._graph.screen2GraphCoords(width, height);\n    \n    const padding = 100 / this.scale;\n    \n    this.viewportBounds = {\n      x0: topLeft.x - padding,\n      y0: topLeft.y - padding,\n      x1: bottomRight.x + padding,\n      y1: bottomRight.y + padding\n    };\n  }\n\n  private isNodeInViewport(node: any): boolean {\n    const x = node.x ?? 0;\n    const y = node.y ?? 0;\n    return x >= this.viewportBounds.x0 && \n           x <= this.viewportBounds.x1 && \n           y >= this.viewportBounds.y0 && \n           y <= this.viewportBounds.y1;\n  }\n\n  private isLinkInViewport(link: any): boolean {\n    const sourceX = link.source.x ?? 0;\n    const sourceY = link.source.y ?? 0;\n    const targetX = link.target.x ?? 0;\n    const targetY = link.target.y ?? 0;\n    \n    if ((sourceX < this.viewportBounds.x0 && targetX < this.viewportBounds.x0) ||\n        (sourceX > this.viewportBounds.x1 && targetX > this.viewportBounds.x1) ||\n        (sourceY < this.viewportBounds.y0 && targetY < this.viewportBounds.y0) ||\n        (sourceY > this.viewportBounds.y1 && targetY > this.viewportBounds.y1)) {\n      return false;\n    }\n    \n    return true;\n  }\n\n  mount(container: HTMLElement) {\n    if (this._el) {\n      return;\n    }\n    this._el = container;\n\n    const { width: initialWidth, height: initialHeight } = container.getBoundingClientRect();\n\n    const graph = new ForceGraph(container)\n      .width(initialWidth)\n      .height(initialHeight)\n      .backgroundColor('transparent')\n      .autoPauseRedraw(false)\n      .warmupTicks(50)\n      .nodeAutoColorBy('clusterId')\n      .nodeCanvasObject((node: any, ctx: CanvasRenderingContext2D, globalScale: number) => {\n        if (this.isNodeInViewport(node)) {\n          this.drawNodeWithLabel(node, ctx, globalScale);\n        }\n      })\n      .linkCanvasObject((link: any, ctx: CanvasRenderingContext2D, globalScale: number) => {\n        this.scale = globalScale;\n        if (this.scale > 
this.zoomLevels.three && this.isLinkInViewport(link)) {\n          this.drawLink(link, ctx);\n        }\n      })\n      .linkCanvasObjectMode(() => 'replace')\n      .onNodeClick((node: any, event: MouseEvent) => {\n        this.onNodeClick(node, event);\n      })\n      .onLinkClick((link: any, event: MouseEvent) => {\n        this.onLinkClick(link, event);\n      })\n      .onBackgroundClick(() => {\n        this.onBackgroundClick();\n      })\n      .d3Force('x', d3.forceX(0).strength(0.05))\n      .d3Force('y', d3.forceY(0).strength(0.05))\n      .d3Force(\"link\", d3.forceLink().id((d: any) => d.id).distance(this.linkDefaultDistance))\n      .d3Force(\"charge\", d3.forceManyBody()\n        .strength(this.chargeDefaultStrength)\n        .theta(1.2)\n      )\n      .onZoom((transform: any) => {\n        this.scale = transform.k;\n      })\n      .onRenderFramePre(() => {\n        this.updateViewportBounds();\n      });\n\n    this._graph = graph;\n\n    setTimeout(() => {\n      this.initialLayoutComplete = true;\n      graph.d3Force('x', null);\n      graph.d3Force('y', null);\n      graph.d3Force(\"charge\")?.distanceMax(300).strength(0);\n\n      const data = graph.graphData();\n      data.nodes.forEach((node: any) => {\n        node.fx = node.x;\n        node.fy = node.y;\n      });\n    }, 2000);\n\n    container.style.overflow = 'hidden';\n\n    this._ro = new ResizeObserver(([entry]) => {\n      const { width, height } = entry.contentRect;\n      this._graph.width(width).height(height);\n    });\n    this._ro.observe(container);\n\n    this.render();\n\n    setTimeout(() => {\n      const canvas = container.querySelector('canvas');\n      if (canvas) {\n        canvas.style.width = '100%';\n        canvas.style.height = '100%';\n        canvas.style.display = 'block';\n      }\n    }, 0);\n  }\n\n  unmount() {\n    if (this._onUpdateLink) {\n      this.network.off('update:link', this._onUpdateLink);\n      this._onUpdateLink = undefined;\n    }\n    if 
(this._onUpdateNode) {\n      this.network.off('update:node', this._onUpdateNode);\n      this._onUpdateNode = undefined;\n    }\n    if (!this._el) {\n      return;\n    }\n\n    if (this._graph) {\n      this._graph.onNodeClick(null);\n      this._graph.onLinkClick(null);\n      this._graph.onBackgroundClick(null);\n      \n      this._graph.graphData({ nodes: [], links: [] });\n      \n      if (this._el) {\n        this._el.innerHTML = '';\n      }\n      \n      this._graph = undefined;\n    }\n    \n    this._ro?.disconnect();\n    this._ro = undefined;\n    this._el = undefined;\n  }\n\n  private drawNodeWithLabel(node: any, ctx: CanvasRenderingContext2D, globalScale: number) {\n    const nodeRadius = 8;\n    const largeNodeRadius = 16;\n    \n    // Use different rendering based on zoom level\n    if (globalScale < this.zoomLevels.one) {\n      ctx.fillStyle = node.color;\n      ctx.fillRect(node.x - nodeRadius/2, node.y - nodeRadius/2, largeNodeRadius, largeNodeRadius);\n      return;\n    }\n\n    // Full circle rendering\n    ctx.beginPath();\n    ctx.arc(node.x, node.y, nodeRadius, 0, 2 * Math.PI, false);\n    ctx.fillStyle = node.color;\n    ctx.fill();\n\n    // Selection/highlight strokes\n    if (this.selectedNode && this.selectedNode.id === node.id) {\n      ctx.strokeStyle = this.colors.nodeSelected;\n      ctx.lineWidth = 3;\n      ctx.stroke();\n    } else if (this.highlightedNodes.has(node.id)) {\n      ctx.strokeStyle = this.colors.nodeHighlighted;\n      ctx.lineWidth = 3;\n      ctx.stroke();\n    }\n\n    // Labels only when zoomed in enough\n    if (globalScale >= this.zoomLevels.five) {\n      ctx.font = node.fontString;\n      ctx.textAlign = 'center';\n      ctx.textBaseline = 'middle';\n      ctx.fillStyle = node.labelColor;\n      ctx.fillText(node.label, node.x, node.y + nodeRadius + node.fontSize * 0.7);\n    }\n  }\n\n  private drawLink(link: any, ctx: CanvasRenderingContext2D) {\n    const source = link.source;\n    const target = 
link.target;\n    \n    // Determine link color\n    let color = this.colors.linkDefaultColor;\n    if (this.selectedLink && this.selectedLink.id === link.id) {\n      color = this.colors.linkSelected;\n    } else if (this.highlightedLinks.has(link.id)) {\n      color = this.colors.linkHighlighted;\n    }\n    \n    ctx.strokeStyle = color;\n    ctx.lineWidth = Math.min(this.linkDefaultWidth * this.scale, 1);\n    \n    ctx.beginPath();\n    ctx.moveTo(source.x, source.y);\n    ctx.lineTo(target.x, target.y);\n    ctx.stroke();\n    \n    if (this.scale > this.zoomLevels.four) {\n      this.drawArrow(ctx, source, target, color);\n    }\n  }\n\n  private drawArrow(\n    ctx: CanvasRenderingContext2D,\n    source: any,\n    target: any,\n    color: string\n  ) {\n    const arrowLength = 6;\n    const arrowAngle = Math.PI / 6;\n    \n    const dx = target.x - source.x;\n    const dy = target.y - source.y;\n    const angle = Math.atan2(dy, dx);\n    \n    const targetRadius = target.radius || 8;\n    const arrowX = target.x - Math.cos(angle) * targetRadius;\n    const arrowY = target.y - Math.sin(angle) * targetRadius;\n    \n    ctx.fillStyle = color;\n    ctx.beginPath();\n    ctx.moveTo(arrowX, arrowY);\n    ctx.lineTo(\n      arrowX - arrowLength * Math.cos(angle - arrowAngle),\n      arrowY - arrowLength * Math.sin(angle - arrowAngle)\n    );\n    ctx.lineTo(\n      arrowX - arrowLength * Math.cos(angle + arrowAngle),\n      arrowY - arrowLength * Math.sin(angle + arrowAngle)\n    );\n    ctx.closePath();\n    ctx.fill();\n  }\n\n  private onNodeClick(node: any, event: MouseEvent) {\n    this.selectedNode = node;\n    this.selectedLink = null;\n    \n    this.options.onClickNode?.(this.network.node(node.id)!, event);\n    this.highlightConnections(node);\n  }\n\n  private onLinkClick(link: any, event: MouseEvent) {\n    this.selectedLink = link;\n    this.selectedNode = null;\n    \n    this.options.onClickLink?.(this.network.link(link.id)!, event);\n    
this.highlightLink(link);\n  }\n\n  private onBackgroundClick() {\n    this.options.onClickCanvas?.();\n    this.clearHighlight();\n  }\n\n  private highlightLink(link: any) {\n    this.highlightedLinks.clear();\n    this.highlightedLinks.add(link.id);\n  }\n\n  private highlightConnections(node: any) {\n    const adjacency = this.adjacencyMap.get(node.id);\n    if (!adjacency) {\n      return;\n    }\n    \n    const connectedNodeIds = adjacency.connectedNodes;\n    const connectedLinkIds = adjacency.connectedLinks;\n    \n    this.highlightedNodes.clear();\n    connectedNodeIds.forEach(nodeId => this.highlightedNodes.add(nodeId));\n    \n    this.highlightedLinks.clear();\n    connectedLinkIds.forEach(linkId => this.highlightedLinks.add(linkId));\n\n    const data = this._graph.graphData();\n    data.nodes.forEach((n: any) => {\n      if (connectedNodeIds.has(n.id)) {\n        n.fx = null;\n        n.fy = null;\n      }\n    });\n    \n    this._graph.d3Force(\"link\").distance((link: any) => {\n      if (connectedLinkIds.has(link.id)) {\n        return this.linkHighlightDistance;\n      }\n      return this.linkDefaultDistance;\n    });\n\n    this._graph.d3Force(\"charge\").strength((node: any) => {\n      if (connectedNodeIds.has(node.id)) {\n        return this.chargeHighlightStrength;\n      }\n      return 0;\n    });\n\n    this._graph.d3ReheatSimulation();\n  }\n\n  private clearHighlight() {\n    this.selectedNode = null;\n    this.selectedLink = null;\n    this.highlightedNodes.clear();\n    this.highlightedLinks.clear();\n    \n    this._graph.d3Force(\"link\").distance(this.linkDefaultDistance);\n    if (!this.initialLayoutComplete) return;\n    this._graph.d3Force(\"charge\").strength(0);\n    \n    setTimeout(() => {\n      const data = this._graph.graphData();\n      data.nodes.forEach((n: any) => {\n        n.fx = n.x;\n        n.fy = n.y;\n      });\n    }, 500);\n    \n    this._graph.d3ReheatSimulation();\n  }\n\n  private 
calculateAndCacheClusters() {\n    if (!this.nodes || !this.links) {\n      return;\n    }\n    const clusters = this.findClusters();\n    \n    this.nodes.forEach(node => {\n      const clusterId = clusters.get(node.id) || 0;\n      (node as any).clusterId = clusterId;\n    });\n    \n    this.clustersCalculated = true;\n  }\n  \n  private calculateAndCacheAdjacency() {\n    if (!this.nodes || !this.links) {\n      return;\n    }\n    \n    this.adjacencyMap.clear();\n    \n    this.nodes.forEach(node => {\n      this.adjacencyMap.set(node.id, {\n        connectedNodes: new Set<IdType>(),\n        connectedLinks: new Set<IdType>()\n      });\n    });\n    \n    this.links.forEach(link => {\n      const sourceId = typeof link.source === 'object' ? link.source.id : link.source;\n      const targetId = typeof link.target === 'object' ? link.target.id : link.target;\n      \n      const sourceAdjacency = this.adjacencyMap.get(sourceId);\n      const targetAdjacency = this.adjacencyMap.get(targetId);\n      \n      if (sourceAdjacency && targetAdjacency) {\n        sourceAdjacency.connectedNodes.add(targetId);\n        targetAdjacency.connectedNodes.add(sourceId);\n        \n        sourceAdjacency.connectedLinks.add(link.id);\n        targetAdjacency.connectedLinks.add(link.id);\n      }\n    });\n    \n    this.adjacencyCalculated = true;\n  }\n\n  private findClusters(): Map<IdType, number> {\n    const clusters = new Map<IdType, number>();\n    const visited = new Set<IdType>();\n    let clusterId = 0;\n    \n    const adjacencyList = new Map<IdType, IdType[]>();\n    this.nodes.forEach(node => {\n      adjacencyList.set(node.id, []);\n    });\n    \n    this.links.forEach(link => {\n      const sourceId = typeof link.source === 'object' ? link.source.id : link.source;\n      const targetId = typeof link.target === 'object' ? 
link.target.id : link.target;\n      \n      if (adjacencyList.has(sourceId) && adjacencyList.has(targetId)) {\n        adjacencyList.get(sourceId)!.push(targetId);\n        adjacencyList.get(targetId)!.push(sourceId);\n      }\n    });\n    \n    const dfs = (nodeId: IdType, currentClusterId: number) => {\n      if (visited.has(nodeId)) return;\n      \n      visited.add(nodeId);\n      clusters.set(nodeId, currentClusterId);\n      \n      const neighbors = adjacencyList.get(nodeId) || [];\n      for (const neighborId of neighbors) {\n        if (!visited.has(neighborId)) {\n          dfs(neighborId, currentClusterId);\n        }\n      }\n    };\n\n    this.nodes.forEach(node => {\n      if (!visited.has(node.id)) {\n        dfs(node.id, clusterId);\n        clusterId++;\n      }\n    });\n    \n    return clusters;\n  }\n\n  render() {\n    if (!this.clustersCalculated) {\n      this.calculateAndCacheClusters();\n    }\n    \n    if (!this.adjacencyCalculated) {\n      this.calculateAndCacheAdjacency();\n    }\n\n    const graphData = {\n      nodes: this.nodes,\n      links: this.links\n    };\n    \n    this._graph.graphData(graphData);\n\n    this._onUpdateNode = (id: IdType) => {\n      const nodeIndex = this.nodes.findIndex(n => n.id === id);\n      if (nodeIndex !== -1) {\n        const networkNode = this.network.node(id);\n        if (networkNode) {\n          this.nodes[nodeIndex].label = this.options.getNodeLabel?.(networkNode);\n          this.nodes[nodeIndex].details = this.options.getNodeDetails?.(networkNode);\n          this.nodes[nodeIndex].meta = this.options.getNodeMeta?.(networkNode);\n          \n          this._graph.graphData({\n            nodes: this.nodes,\n            links: this.links\n          });\n        }\n      }\n    };\n\n    this._onUpdateLink = (id: IdType) => {\n      const linkIndex = this.links.findIndex(l => l.id === id);\n      if (linkIndex !== -1) {\n        const networkLink = this.network.link(id);\n        if 
(networkLink) {\n          this.links[linkIndex].label = this.options.getLinkLabel?.(networkLink);\n          this.links[linkIndex].details = this.options.getLinkDetails?.(networkLink);\n          this.links[linkIndex].meta = this.options.getLinkMeta?.(networkLink);\n          \n          this._graph.graphData({\n            nodes: this.nodes,\n            links: this.links\n          });\n        }\n      }\n    };\n\n    this.network.on('update:node', this._onUpdateNode);\n    this.network.on('update:link', this._onUpdateLink);\n\n    setTimeout(() => {\n      if (this._graph && this._graph.zoomToFit) {\n        this._graph.zoomToFit(400, 50);\n      }\n    }, 1000);\n  }\n\n  // maintain for compatibility with NetworkRenderer\n  focusNode(id: IdType): void {}\n  blurNode(): void {}\n  focusLink(id: IdType): void {}\n  blurLink(): void {}\n}"
  },
  {
    "path": "frontend/app/src/components/graph/network/Network.ts",
    "content": "import EventEmitter from 'eventemitter3';\n\nexport type IdType = string | number;\n\nexport interface NetworkNode {\n  id: IdType;\n}\n\nexport interface NetworkLink {\n  id: IdType;\n  source: IdType;\n  target: IdType;\n}\n\nexport interface BaseNetworkOptions {\n  noDirection: boolean;\n}\n\nexport interface ReadonlyNetwork<Node extends NetworkNode, Link extends NetworkLink> extends EventEmitter<NetworkEvents>{\n  node (id: IdType): Node | undefined;\n\n  link (id: IdType): Link | undefined;\n\n  nodes (): Node[];\n\n  links (): Link[];\n\n  nodeNeighborhoods (id: IdType): Set<IdType> | null;\n}\n\nexport interface Network<Node extends NetworkNode, Link extends NetworkLink> extends ReadonlyNetwork<any, any> {\n  addNode (node: Node): void;\n\n  removeNode (nodeId: IdType): void;\n\n  addLink (link: Link): void;\n\n  removeLink (linkId: IdType): void;\n}\n\nexport interface NetworkEvents {\n  'update:node': [id: IdType];\n  'update:link': [id: IdType];\n}\n\nexport abstract class BaseNetworkView<Node extends NetworkNode, Link extends NetworkLink> extends EventEmitter<NetworkEvents> implements ReadonlyNetwork<Node, Link> {\n  abstract link (id: IdType): Link | undefined\n\n  abstract links (): Link[]\n\n  abstract node (id: IdType): Node | undefined\n\n  abstract nodes (): Node[]\n\n  abstract nodeNeighborhoods (id: IdType): Set<IdType> | null;\n\n  nodesIn (ids: Iterable<IdType>) {\n    return new NodesWhitelistView(this, new Set(ids));\n  }\n\n  nodesNotIn (ids: Iterable<IdType>) {\n    return new NodesBlacklistView(this, new Set(ids));\n  }\n\n  nodesWithAttrMatch<K extends keyof Node> (field: K, test: (value: Node[K], node: Node) => boolean) {\n    return new NodeAttributeFilterView(this, field, test);\n  }\n\n  merge<NewNode extends NetworkNode, NewLink extends NetworkLink> (nodeIdentifier: (node: Node) => IdType, mergeNodes: (nodes: Node[], identifier: IdType) => NewNode, mergeLinks: (links: Link[], identifier: string) => NewLink) {\n    
const sourceNodes = this.nodes();\n    const sourceLinks = this.links();\n\n    const nodeIdentifiersMap = new Map<IdType, IdType>();\n    const nodesMap = new Map<IdType, Node[]>();\n    const linksMap = new Map<string, Link[]>;\n\n    for (let node of sourceNodes) {\n      const identifier = nodeIdentifier(node);\n      let nodes = nodesMap.get(identifier);\n      if (!nodes) {\n        nodes = [];\n        nodesMap.set(identifier, nodes);\n      }\n      nodes.push(node);\n      nodeIdentifiersMap.set(node.id, identifier);\n    }\n\n    for (let link of sourceLinks) {\n      link = { ...link };\n      link.source = nodeIdentifiersMap.get(link.source)!;\n      link.target = nodeIdentifiersMap.get(link.target)!;\n      const identifier = `${link.source}:${link.target}`;\n      let links = linksMap.get(identifier);\n      if (!links) {\n        links = [];\n        linksMap.set(identifier, links);\n      }\n      links.push(link);\n    }\n\n    const network = new BaseNetwork<NewNode, NewLink>({ noDirection: false });\n    nodesMap.forEach((nodes, identifier) => {\n      network.addNode(mergeNodes(nodes, identifier));\n    });\n    linksMap.forEach((links, identifier) => {\n      network.addLink(mergeLinks(links, identifier));\n    });\n\n    return network;\n  }\n}\n\nexport class BaseNetwork<Node extends NetworkNode, Link extends NetworkLink> extends BaseNetworkView<Node, Link> implements Network<Node, Link> {\n  private _nodes: Node[] = [];\n  private _links: Link[] = [];\n\n  private _nodesMap: Map<IdType, Node> = new Map();\n  private _linksMap: Map<IdType, Link> = new Map();\n\n  private _sourceRelMap: Map<IdType, Set<IdType>> = new Map();\n  private _targetRelMap: Map<IdType, Set<IdType>> = new Map();\n\n  readonly noDirection: boolean;\n\n  constructor ({\n    noDirection = true,\n  }: Partial<BaseNetworkOptions> = {}) {\n    super();\n    this.noDirection = noDirection;\n    if (noDirection) {\n      this._sourceRelMap = this._targetRelMap;\n    }\n  }\n\n 
 node (nodeId: IdType) {\n    return this._nodesMap.get(nodeId);\n  }\n\n  addNode (node: Node): void {\n    this._nodes.push(node);\n    this._nodesMap.set(node.id, node);\n  }\n\n  removeNode (nodeId: IdType) {\n    if (!this._nodesMap.has(nodeId)) {\n      return;\n    }\n\n    // remove links\n    const sourceLink = this._sourceRelMap.get(nodeId);\n    if (sourceLink) {\n      sourceLink.forEach(id => {\n        this.removeLink(id);\n      });\n    }\n    if (!this.noDirection) {\n      const targetLink = this._targetRelMap.get(nodeId);\n      if (targetLink) {\n        targetLink.forEach(id => {\n          this.removeLink(id);\n        });\n      }\n    }\n\n    this._nodesMap.delete(nodeId);\n    const idx = this._nodes.findIndex(node => node.id === nodeId);\n    if (idx >= 0) {\n      this._nodes.splice(idx, 1);\n    }\n  }\n\n  link (nodeId: IdType) {\n    return this._linksMap.get(nodeId);\n  }\n\n  addLink (link: Link): void {\n    this._links.push(link);\n    this._linksMap.set(link.id, link);\n\n    let set = this._sourceRelMap.get(link.source);\n    if (!set) {\n      set = new Set();\n    }\n    this._sourceRelMap.set(link.source, set);\n    set.add(link.target);\n\n    if (!this.noDirection) {\n      let set = this._targetRelMap.get(link.target);\n      if (!set) {\n        set = new Set();\n      }\n      this._targetRelMap.set(link.target, set);\n      set.add(link.source);\n    }\n  }\n\n  removeLink (linkId: IdType): void {\n    const link = this._linksMap.get(linkId);\n    if (!link) {\n      return;\n    }\n\n    this._linksMap.delete(linkId);\n    const idx = this._links.findIndex(link => link.id === linkId);\n    if (idx >= 0) {\n      this._links.splice(idx, 1);\n    }\n  }\n\n  nodes () {\n    return this._nodes;\n  }\n\n  links () {\n    return this._links;\n  }\n\n  nodeNeighborhoods (id: IdType): Set<IdType> | null {\n    if (!this._nodesMap.has(id)) {\n      return null;\n    }\n    const set = new Set<IdType>();\n    
this._sourceRelMap.get(id)?.forEach(id => set.add(id));\n    this._targetRelMap.get(id)?.forEach(id => set.add(id));\n    return set;\n  }\n\n  replaceNodeAttrs (id: IdType, partial: Omit<Node, 'id'>) {\n    if ('id' in partial) {\n      throw new Error('cannot replace node id');\n    }\n    const node = this.node(id);\n    if (node) {\n      Object.assign(node, partial);\n      this.emit('update:node', id);\n    } else {\n      throw new Error(`node ${id} not found in network`);\n    }\n  }\n\n  replaceLinkAttrs (id: IdType, partial: Omit<Link, 'id' | 'source' | 'target'>) {\n    if ('id' in partial) {\n      throw new Error('cannot replace link id');\n    }\n    if ('source' in partial || 'target' in partial) {\n      throw new Error('cannot replace link source or target');\n    }\n    const link = this.link(id);\n    if (link) {\n      Object.assign(link, partial);\n      this.emit('update:link', id);\n    } else {\n      throw new Error(`link ${id} not found in network`);\n    }\n  }\n}\n\nexport abstract class NodeFilterNetworkView<Node extends NetworkNode, Link extends NetworkLink> extends BaseNetworkView <Node, Link> {\n  protected constructor (private _source: ReadonlyNetwork<Node, Link>) {\n    super();\n  }\n\n  abstract includesNode (node: Node): boolean;\n\n  nodes () {\n    const rawNodes = this._source.nodes().filter(node => this.includesNode(node));\n    const rawIds = new Set(rawNodes.map(node => node.id));\n    const related: Node[] = [];\n\n    rawNodes.reduce((set, node) => {\n      this._source.nodeNeighborhoods(node.id)?.forEach(id => {\n        if (!rawIds.has(id)) {\n          if (!set.has(id)) {\n            set.add(id);\n            related.push(this._source.node(id)!);\n          }\n        }\n      });\n      return set;\n    }, new Set<IdType>);\n\n    return [...rawNodes, ...related];\n  }\n\n  links () {\n    const rawNodes = this._source.nodes().filter(node => this.includesNode(node));\n    const rawIds = new Set(rawNodes.map(node => 
node.id));\n\n    rawNodes.forEach(node => {\n      this._source.nodeNeighborhoods(node.id)?.forEach(id => {\n        rawIds.add(id);\n      });\n    });\n\n    return this._source.links().filter(link => {\n      return rawIds.has(link.source) && rawIds.has(link.target);\n    });\n  }\n\n  node (idType: IdType) {\n    return this._source.node(idType);\n  }\n\n  link (idType: IdType) {\n    return this._source.link(idType);\n  }\n\n  nodeNeighborhoods (id: IdType): Set<IdType> | null {\n    return this._source.nodeNeighborhoods(id);\n  }\n}\n\nclass NodesWhitelistView<Node extends NetworkNode, Link extends NetworkLink> extends NodeFilterNetworkView<Node, Link> {\n  constructor (view: ReadonlyNetwork<Node, Link>, private readonly _nodeSet: Set<IdType>) {\n    super(view);\n  }\n\n  includesNode (node: Node): boolean {\n    return this._nodeSet.has(node.id);\n  }\n}\n\nclass NodesBlacklistView<Node extends NetworkNode, Link extends NetworkLink> extends NodeFilterNetworkView<Node, Link> {\n  constructor (view: ReadonlyNetwork<Node, Link>, private readonly _nodeSet: Set<IdType>) {\n    super(view);\n  }\n\n  includesNode (node: Node): boolean {\n    return !this._nodeSet.has(node.id);\n  }\n}\n\nclass NodeAttributeFilterView<Node extends NetworkNode, Link extends NetworkLink, K extends keyof Node> extends NodeFilterNetworkView<Node, Link> {\n  constructor (view: ReadonlyNetwork<Node, Link>, private _attr: K, private _test: (value: Node[K], node: Node) => boolean) {super(view);}\n\n  includesNode (node: Node): boolean {\n    return this._test(node[this._attr], node);\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/graph/network/NetworkRendererOptions.ts",
    "content": "import type { SimulationNodeDatum } from 'd3';\n\nexport interface NetworkRendererOptions<Node, Link> {\n  showId?: boolean;\n  showLinkLabel?: boolean;\n\n  getNodeInitialAttrs?: (node: Node, index: number) => Pick<SimulationNodeDatum, 'x' | 'y'>;\n\n  getNodeLabel?: (node: Node) => string | undefined;\n  getNodeDetails?: (node: Node) => string | undefined;\n  getNodeMeta?: (node: Node) => any;\n  getNodeRadius?: (node: Node) => number;\n  getNodeColor?: (node: Node) => string;\n  getNodeStrokeColor?: (node: Node) => string;\n  getNodeLabelColor?: (node: Node) => string;\n  getNodeLabelStrokeColor?: (node: Node) => string;\n\n  getLinkLabel?: (node: Link) => string | undefined;\n  getLinkDetails?: (node: Link) => string | undefined;\n  getLinkMeta?: (node: Link) => any;\n\n  getLinkColor?: (link: Link) => string;\n  getLinkLabelColor?: (link: Link) => string;\n  getLinkLabelStrokeColor?: (link: Link) => string;\n\n  onClickNode?: (node: Node, event: MouseEvent) => void;\n  onClickLink?: (node: Link, event: MouseEvent) => void;\n  onClickCanvas?: () => void;\n}\n\n"
  },
  {
    "path": "frontend/app/src/components/graph/remote.ts",
    "content": "import { useEffect, useRef, useState } from 'react';\n\nexport function useRemote<Params extends any[], Data> (initialData: Data, fn: (...params: Params) => Promise<Data>, ...params: Params) {\n  const [revalidating, setRevalidating] = useState(false);\n  const [data, setData] = useState<Data>(initialData);\n  const [error, setError] = useState<unknown>(undefined);\n\n  const busyRef = useRef(false);\n  const dataRef = useRef<Data>(data);\n\n  const revalidate = () => {\n    if (busyRef.current) {\n      return;\n    }\n\n    busyRef.current = true;\n    setRevalidating(true);\n    return fn(...params)\n      .then(data => {\n        setData(data);\n        dataRef.current = data;\n      }, setError)\n      .finally(() => {\n        setRevalidating(false);\n        busyRef.current = false;\n      });\n  };\n\n  const mutate = (mutator: (prev: Data) => Data) => {\n    const newData = dataRef.current = mutator(dataRef.current);\n    setData(newData);\n    return newData;\n  };\n\n  useEffect(() => {\n    void revalidate();\n  }, [fn, ...params]);\n\n  return {\n    data,\n    revalidating,\n    error,\n    revalidate,\n    mutate,\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/graph/selectEntities.ts",
    "content": "import { getEntity, type KnowledgeGraphEntity, searchEntity } from '@/api/graph';\nimport { handleServerEntity } from '@/components/graph/utils';\nimport type { RemoteEntity } from '@/components/graph/components/EntitiesTable';\nimport { type ColumnDef, getCoreRowModel, useReactTable } from '@tanstack/react-table';\nimport { type RowSelectionState } from '@tanstack/table-core';\nimport { type Dispatch, type MutableRefObject, type SetStateAction, useEffect, useMemo, useRef, useState } from 'react';\nimport useSWR from 'swr';\n\nexport type SearchEntityFilter = {\n  query: string;\n  top_k?: number\n}\n\nfunction shouldFetch (filter: SearchEntityFilter) {\n  return !!filter.query.trim();\n}\n\nexport type UseEntitiesRequired = Pick<ReturnType<typeof useEntities>, 'selectedEntities' | 'rowSelection' | 'setRowSelection' | 'entityMap'>\n\nexport function useEntities () {\n  const [rowSelection, setRowSelection] = useState<RowSelectionState>({});\n  const entityMap = useRef<Map<string, KnowledgeGraphEntity>>(undefined as never);\n  if (!entityMap.current) {\n    entityMap.current = new Map<string, KnowledgeGraphEntity>();\n  }\n\n  const selectedEntities = useMemo(() => {\n    return Object.keys(rowSelection).map(id => entityMap.current.get(id)).filter(Boolean) as KnowledgeGraphEntity[];\n  }, [rowSelection]);\n\n  const clearSelection = (id?: number) => {\n    if (id == null) {\n      setRowSelection({});\n    } else {\n      setRowSelection(selection => {\n        const idStr = String(id);\n        if (idStr in selection) {\n          selection = { ...selection };\n          delete selection[idStr];\n        }\n\n        return selection;\n      });\n    }\n  };\n\n  return {\n    selectedEntities,\n    rowSelection,\n    setRowSelection,\n    clearSelection,\n    entityMap,\n  };\n}\n\nexport function useGraphEntitiesTable (kbId: number, { rowSelection, setRowSelection, entityMap, columns }: { rowSelection: RowSelectionState, setRowSelection: 
Dispatch<SetStateAction<RowSelectionState>>, entityMap: MutableRefObject<Map<string, RemoteEntity>>, columns: ColumnDef<RemoteEntity, any>[] }) {\n  const [filter, setFilter] = useState<SearchEntityFilter>(() => ({ query: '', top_k: undefined }));\n\n  const { data, isLoading, error } = useSWR(shouldFetch(filter) && `api.knowledge-bases.${kbId}.graph.search-entity?query=${filter.query}&top_k=${filter.top_k}`, () => searchEntity(kbId, filter.query, filter.top_k), {\n    revalidateOnFocus: false,\n  });\n\n  const entities = useMemo(() => {\n    if (data) {\n      return data.map(serverEntity => {\n        const entity = handleServerEntity(serverEntity);\n        entityMap.current.set(String(entity.id), entity);\n        return entity;\n      });\n    } else {\n      return [];\n    }\n  }, [data]);\n\n  const table = useReactTable<RemoteEntity>({\n    data: entities,\n    getRowId: row => String(row.id),\n    columns,\n    getCoreRowModel: getCoreRowModel(),\n    enableRowSelection: true,\n    enableMultiRowSelection: true,\n    state: {\n      rowSelection,\n    },\n    onRowSelectionChange: setRowSelection,\n  });\n\n  return {\n    table,\n    filter,\n    setFilter,\n    isLoading,\n    error,\n  };\n}\n\nexport function useGraphEntitiesByIdsTable (kbId: number, { rowSelection, setRowSelection, entityMap, columns }: { rowSelection: RowSelectionState, setRowSelection: Dispatch<SetStateAction<RowSelectionState>>, entityMap: MutableRefObject<Map<string, RemoteEntity>>, columns: ColumnDef<RemoteEntity, any>[] }) {\n  const [ids, setIds] = useState<number[]>([]);\n  const [data, setData] = useState<RemoteEntity[]>([]);\n  const promisesRef = useRef<Record<string, Promise<void>>>({});\n  const [failed, setFailed] = useState<Record<string, unknown>>({});\n  const idsRef = useRef(ids);\n\n  useEffect(() => {\n    idsRef.current = ids;\n  });\n\n  useEffect(() => {\n    for (const id of ids) {\n      let entity = entityMap.current.get(String(id));\n      if (!entity) {\n 
       if (!promisesRef.current[String(id)]) {\n          promisesRef.current[String(id)] = getEntity(kbId, id)\n            .then(\n              entity => {\n                entityMap.current.set(String(entity.id), entity);\n                updateData();\n              },\n              error => {\n                entityMap.current.set(String(id), {\n                  id: id,\n                  isLoading: false,\n                  error,\n                });\n                setFailed(failed => ({\n                  ...failed,\n                  [String(id)]: error,\n                }));\n                updateData();\n              },\n            );\n        }\n      }\n    }\n    updateData();\n  }, [kbId, ids]);\n\n  const updateData = () => {\n    setData(idsRef.current.map(id => {\n      const entity = entityMap.current.get(String(id));\n      if (!entity) {\n        return { id, isLoading: true };\n      } else {\n        return entity;\n      }\n    }));\n  };\n\n  const table = useReactTable<RemoteEntity>({\n    data,\n    getRowId: row => String(row.id),\n    columns,\n    getCoreRowModel: getCoreRowModel(),\n    enableRowSelection: true,\n    enableMultiRowSelection: true,\n    state: {\n      rowSelection,\n    },\n    onRowSelectionChange: setRowSelection,\n  });\n\n  return {\n    table,\n    ids,\n    setIds,\n    failed,\n  };\n}\n\n\n\n\n"
  },
  {
    "path": "frontend/app/src/components/graph/useDirtyEntity.ts",
    "content": "import { updateEntity } from '@/api/graph';\nimport { useRef } from 'react';\nimport { useAction } from './action';\nimport { type Entity } from './utils';\nimport type { JsonFieldInstance } from './components/JsonField';\n\nexport function useDirtyEntity (kbId: number, id: any) {\n  const nameRef = useRef<HTMLInputElement>(null);\n  const descriptionRef = useRef<HTMLTextAreaElement>(null);\n  const metaRef = useRef<JsonFieldInstance | null>(null);\n\n  const { loading: saving, reset: resetSave, run: save, data: saveReturns, error: saveError, pending: savePending } = useAction(async () => {\n    const current = getCurrent();\n\n    if (!current) {\n      throw new Error('bad editor state');\n    }\n\n    return await updateEntity(kbId, id, current);\n  });\n\n  const reset = (entity: Entity) => {\n    if (nameRef.current) {\n      nameRef.current.value = entity.name;\n    }\n    if (descriptionRef.current) {\n      descriptionRef.current.value = entity.description;\n    }\n    if (metaRef.current) {\n      metaRef.current.value = entity.meta;\n    }\n  };\n\n  const getCurrent = () => {\n    const name = nameRef.current?.value;\n    const description = descriptionRef.current?.value;\n    const meta = metaRef.current?.value;\n\n    if (name == null || description == null || meta == null) {\n      return undefined;\n    }\n    return {\n      name,\n      description,\n      meta,\n    };\n  };\n\n  return {\n    nameRef,\n    descriptionRef,\n    metaRef,\n    reset,\n    save,\n    saving,\n    saveError,\n    savePending,\n    saveReturns,\n    resetSave,\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/graph/useDirtyRelationship.ts",
    "content": "import { updateRelationship } from '@/api/graph';\nimport { useRef } from 'react';\nimport { useAction } from './action';\nimport { type Relationship } from './utils';\nimport type { JsonFieldInstance } from './components/JsonField';\n\nexport function useDirtyRelationship (kbId: number, id: any) {\n  const descriptionRef = useRef<HTMLTextAreaElement>(null);\n  const weightRef = useRef<HTMLInputElement>(null);\n  const metaRef = useRef<JsonFieldInstance | null>(null);\n\n  const { loading: saving, reset: resetSave, run: save, data: saveReturns, error: saveError, pending: savePending } = useAction(async () => {\n    const current = getCurrent();\n\n    if (!current) {\n      throw new Error('bad editor state');\n    }\n\n    return await updateRelationship(kbId, id, current);\n  });\n\n  const reset = (relationship: Relationship) => {\n    if (weightRef.current) {\n      weightRef.current.value = String(relationship.weight);\n    }\n    if (descriptionRef.current) {\n      descriptionRef.current.value = relationship.description;\n    }\n    if (metaRef.current) {\n      metaRef.current.value = relationship.meta;\n    }\n  };\n\n  const getCurrent = () => {\n    const weight = weightRef.current?.value;\n    const description = descriptionRef.current?.value;\n    const meta = metaRef.current?.value;\n\n    if (weight == null || description == null || meta == null) {\n      return undefined;\n    }\n    return {\n      weight: parseInt(weight),\n      description,\n      relationship_desc: description,\n      meta,\n    };\n  };\n\n  return {\n    weightRef,\n    descriptionRef,\n    metaRef,\n    reset,\n    save,\n    saving,\n    saveError,\n    savePending,\n    saveReturns,\n    resetSave,\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/graph/useNetwork.ts",
    "content": "import { type Entity, handleServerGraph, type Relationship, type ServerGraphData } from '@/components/graph/utils';\nimport { BaseNetwork } from '@/components/graph/network/Network';\nimport { useMemo } from 'react';\n\nexport function useNetwork (span: { output: ServerGraphData } | ServerGraphData | undefined | null) {\n  return useMemo(() => {\n    const network = new BaseNetwork<Entity, Relationship>({ noDirection: false });\n    if (span) {\n      const { entities, relationships } = 'output' in span ? handleServerGraph(span.output) : handleServerGraph(span);\n      entities.forEach((entity: any) => network.addNode(entity));\n      relationships.forEach(({ source_entity_id, target_entity_id, ...rest }: any) => network.addLink({\n        source: source_entity_id,\n        target: target_entity_id,\n        ...rest,\n      }));\n    }\n    return network;\n  }, [span]);\n}"
  },
  {
    "path": "frontend/app/src/components/graph/utils.ts",
    "content": "import { type KnowledgeGraph, type KnowledgeGraphEntity, type KnowledgeGraphRelationship } from '@/api/graph';\n\nexport type Entity = {\n  id: number | string\n  knowledge_base_id?: number | null;\n  node_id: number;\n  name: string\n  description: string\n  meta: any\n  created_at?: string\n  updated_at?: string\n  entity_type: string\n  synopsis_info?: {\n    entities: number[]\n    topic: string\n  } | null\n}\n\nexport type Relationship = {\n  id: number | string\n  knowledge_base_id?: number | null;\n  relationship_id: number;\n  source: number | string\n  target: number | string\n  meta: any\n  description: string\n  weight: number\n}\n\nexport type ServerGraphData = KnowledgeGraph\n\nexport type GraphData = {\n  entities: Entity[]\n  relationships: Relationship[]\n  chunks?: unknown[]\n}\n\nexport function handleServerEntity (serverEntity: KnowledgeGraphEntity): Entity {\n  return {\n    ...serverEntity,\n    id: `${serverEntity.knowledge_base_id ?? 0}-${serverEntity.id}`,\n    node_id: serverEntity.id,\n  };\n}\n\nexport function handleServerRelationship ({ source_entity_id, target_entity_id, ...rest }: KnowledgeGraphRelationship): Relationship {\n  return ({\n    ...rest,\n    id: `${rest.knowledge_base_id ?? 0}-${rest.id}`,\n    relationship_id: rest.id,\n    source: `${rest.knowledge_base_id ?? 0}-${source_entity_id}`,\n    target: `${rest.knowledge_base_id ?? 0}-${target_entity_id}`,\n  });\n}\n\nexport const handleServerGraph = <T extends {}> ({ entities, relationships, ...rest }: ServerGraphData & T): GraphData & T => {\n  return {\n    ...rest,\n    relationships: relationships.map(handleServerRelationship),\n    entities: entities.map(handleServerEntity),\n  } as never;\n};\n"
  },
  {
    "path": "frontend/app/src/components/gtag-provider.tsx",
    "content": "'use client';\n\nimport { createContext, type ReactNode, useContext, useMemo } from 'react';\n\nexport interface GtagFn {\n  (command: 'event', event: Gtag.EventNames | (string & {}), eventParams?: Omit<Gtag.ControlParams | Gtag.EventParams | Gtag.CustomParams, 'send_to'>): void;\n}\n\nexport interface GtagProviderValues {\n  gtagFn: GtagFn;\n}\n\nconst GtagContext = createContext<GtagProviderValues>({\n  gtagFn: () => {},\n});\n\nexport interface GtagProviderProps {\n  configured: boolean;\n  gtagId?: string | null;\n  group?: string | null;\n  children?: ReactNode;\n  // Use custom gtag fn to send events. defaults to window.gtag (On demand).\n  gtagFn?: GtagFn;\n}\n\nexport function GtagProvider ({ configured, gtagId, group, gtagFn: propGtagFn, children }: GtagProviderProps) {\n  const gtagFn = useMemo(() => {\n    if (configured && gtagId) {\n      return (_: 'event', event: Gtag.EventNames | (string & {}), eventParams?: Omit<Gtag.ControlParams & Gtag.EventParams & Gtag.CustomParams, 'send_to'> | undefined): void => {\n        if (_ !== 'event') {\n          // Only support event method\n          return;\n        }\n        (propGtagFn ?? gtag)?.('event', event, { ...eventParams, send_to: gtagId, groups: eventParams?.groups ? [group, ...eventParams.groups] : group });\n      };\n    } else {\n      return () => {};\n    }\n  }, [propGtagFn, configured, gtagId, group]);\n\n  return (\n    <GtagContext.Provider value={{ gtagFn }}>\n      {children}\n    </GtagContext.Provider>\n  );\n}\n\nexport function useGtagFn () {\n  return useContext(GtagContext).gtagFn;\n}\n"
  },
  {
    "path": "frontend/app/src/components/html-viewer.tsx",
    "content": "'use client';\n\nimport Highlight from 'highlight.js/lib/core';\nimport html from 'highlight.js/lib/languages/xml';\nimport { useEffect, useState } from 'react';\nimport './code-theme.scss';\n\nHighlight.registerLanguage('html', html);\n\nexport function HtmlViewer ({ value: propValue }: { value: string }) {\n  const [value, setValue] = useState(() => propValue.replaceAll('<', '&lt;'));\n\n  useEffect(() => {\n    setValue(propValue);\n    try {\n      const { value: result } = Highlight.highlight(propValue, { language: 'html' });\n      setValue(result);\n    } catch {\n    }\n  }, [propValue]);\n\n  return (\n    <code>\n      <pre className=\"whitespace-pre-wrap text-xs font-mono\" dangerouslySetInnerHTML={{ __html: value }} />\n    </code>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/icons/index.ts",
    "content": "export { default as FilesIcon } from './files.svg'\nexport { default as MarkdownIcon } from './markdown.svg'\nexport { default as TailwindIcon } from './tailwind.svg'\nexport { default as LinkIcon } from './link.svg'\nexport { default as LightningIcon } from './lightning.svg'\nexport { default as GlobeIcon } from './globe.svg'\nexport { default as PictureIcon } from './picture.svg'\nexport { default as CodeIcon } from './code.svg'\nexport { default as BrushIcon } from './brush.svg'\nexport { default as DropperIcon } from './dropper.svg'\nexport { default as StarsIcon } from './stars.svg'\nexport { default as FormulaIcon } from './formula.svg'\nexport { default as WarningIcon } from './warning.svg'\nexport { default as ChevronRightIcon } from './chevron-right.svg'\nexport { default as BoxIcon } from './box.svg'\nexport { default as GearIcon } from './gear.svg'\nexport { default as RowsIcon } from './rows.svg'\nexport { default as CardsIcon } from './cards.svg'\nexport { default as OneIcon } from './one.svg'\nexport { default as CloudIcon } from './cloud.svg'\nexport { default as TableIcon } from './table.svg'\nexport { default as FileIcon } from './file.svg'\nexport { default as NewsletterIcon } from './newsletter.svg'\nexport { default as ArrowRightIcon } from './arrow-right.svg'\nexport { default as SwitchIcon } from './switch.svg'\nexport { default as TerminalIcon } from './terminal.svg'\nexport { default as DiagramIcon } from './diagram.svg'\nexport { default as FolderTreeIcon } from './folder-tree.svg'\nexport { default as IdCardIcon } from './id-card.svg'\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/create-knowledge-base-form.stories.tsx",
    "content": "import { CreateKnowledgeBaseForm } from '@/components/knowledge-base/create-knowledge-base-form';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport { AppRouterContext } from 'next/dist/shared/lib/app-router-context.shared-runtime';\n\nconst meta = {\n  title: 'Components/KnowledgeBase/CreateKnowledgeBaseForm',\n  component: CreateKnowledgeBaseForm,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ width: 640 }}>\n        <AppRouterContext value={{} as any}>\n          <Story />\n        </AppRouterContext>\n      </div>\n    ),\n  ],\n  argTypes: {},\n  args: {},\n} satisfies Meta<typeof CreateKnowledgeBaseForm>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Default: Story = {\n  args: {\n    onCreated () {},\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/create-knowledge-base-form.tsx",
    "content": "import { createKnowledgeBase } from '@/api/knowledge-base';\nimport { EmbeddingModelSelect, LLMSelect } from '@/components/form/biz';\nimport { FormInput, FormTextarea } from '@/components/form/control-widget';\nimport { withCreateEntityForm } from '@/components/form/create-entity-form';\nimport { FormIndexMethods } from '@/components/knowledge-base/form-index-methods';\nimport { mutateKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nconst Form = withCreateEntityForm(z.object({\n  name: z.string().min(1),\n  description: z.string(),\n  index_methods: z.enum(['knowledge_graph', 'vector']).array(),\n  llm_id: z.number().nullable().optional(),\n  embedding_model_id: z.number().nullable().optional(),\n  data_sources: z.never().array().length(0), // Use external page to create data source.\n}), createKnowledgeBase);\n\nexport function CreateKnowledgeBaseForm ({}: {}) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n\n  return (\n    <Form\n      transitioning={transitioning}\n      onCreated={kb => {\n        startTransition(() => {\n          router.push(`/knowledge-bases/${kb.id}/data-sources`);\n          router.refresh();\n        });\n        void mutateKnowledgeBases();\n      }}\n      defaultValues={{\n        name: '',\n        description: '',\n        llm_id: undefined,\n        data_sources: [],\n        embedding_model_id: undefined,\n        index_methods: ['vector'],\n      }}\n    >\n      <Form.Basic name=\"name\" label=\"Name\">\n        <FormInput placeholder=\"The name of the knowledge base\" />\n      </Form.Basic>\n      <Form.Basic name=\"description\" label=\"Description\">\n        <FormTextarea placeholder=\"The description of the knowledge base\" />\n      </Form.Basic>\n      <Form.Basic name=\"llm_id\" label=\"LLM\" description=\"Specify the LLM used in building the index. If not specified, the default model will be used.\">\n        <LLMSelect />\n      </Form.Basic>\n      <Form.Basic name=\"embedding_model_id\" label=\"Embedding Model\" description=\"Specify the embedding model used to convert the corpus into vector embedding. If not specified, the default model will be used.\">\n        <EmbeddingModelSelect />\n      </Form.Basic>\n      <Form.Basic name=\"index_methods\" label=\"Index Methods\">\n        <FormIndexMethods />\n      </Form.Basic>\n    </Form>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/document-chunks-table.tsx",
    "content": "import { getKnowledgeBaseDocumentChunks, type KnowledgeGraphDocumentChunk } from '@/api/knowledge-base';\nimport { DataTable } from '@/components/data-table';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport useSWR from 'swr';\n\nexport function DocumentChunksTable ({ knowledgeBaseId, documentId }: { knowledgeBaseId: number, documentId: number }) {\n  const { data = [], isLoading } = useSWR(`api.knowledge-bases.${knowledgeBaseId}.document.${documentId}.chunks`, () => getKnowledgeBaseDocumentChunks(knowledgeBaseId, documentId), {\n    revalidateOnFocus: false,\n  });\n\n  return (\n    <DataTable\n      columns={columns}\n      data={data}\n      loading={isLoading}\n    />\n  );\n}\n\nconst columnsHelper = createColumnHelper<KnowledgeGraphDocumentChunk>();\n\nconst columns: ColumnDef<KnowledgeGraphDocumentChunk, any>[] = [\n  columnsHelper.accessor('id', {}),\n  columnsHelper.accessor('hash', {}),\n  columnsHelper.accessor('text', {}),\n];\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/empty-state.tsx",
    "content": "import { LibraryBig } from 'lucide-react';\n\nexport default function KnowledgeBaseEmptyState () {\n  return (\n    <div className=\"flex flex-col items-center justify-center h-[50vh] gap-6 rounded-md\">\n      <div className=\"flex items-center justify-center w-20 h-20 rounded-full bg-gray-200 dark:bg-gray-800\">\n        <LibraryBig size={40} />\n      </div>\n      <div className=\"space-y-2 text-center\">\n        <h2 className=\"text-2xl font-bold tracking-tight\">No knowledge base to display</h2>\n        <p className=\"text-gray-500 dark:text-gray-400\">\n          To enable AI assistant generate more accurate answers, please follow the steps:\n        </p>\n        <p className=\"text-gray-500 dark:text-gray-400\">\n          1. Create a knowledge base -&gt;\n          2. Import the documents from certain domain -&gt;\n          3. Linked the knowledge base to the chat engine\n        </p>\n      </div>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/form-index-methods.tsx",
    "content": "import type { CreateKnowledgeBaseParams } from '@/api/knowledge-base';\nimport type { FormControlWidgetProps } from '@/components/form/control-widget';\nimport { Label } from '@/components/ui/label';\nimport { Switch } from '@/components/ui/switch';\nimport { forwardRef, type ReactNode, useId } from 'react';\n\nexport const FormIndexMethods = forwardRef<any, FormControlWidgetProps<CreateKnowledgeBaseParams['index_methods']>>(({ value, onChange }, ref) => {\n  return (\n    <div className=\"space-y-2\" ref={ref}>\n      <IndexMethod\n        label=\"Vector Index (Forced)\"\n        description={\n          <>\n            Use <a href=\"https://docs.pingcap.com/tidbcloud/vector-search-overview#vector-embedding\">vector embedding</a> to represent\n            documents so that relevant documents can be retrieved based on their semantics.\n          </>\n        }\n        disabled\n        checked={value?.includes('vector') ?? false}\n        onCheckedChange={checked => {\n          const set = new Set(value);\n          if (checked) {\n            set.add('vector');\n          } else {\n            set.delete('vector');\n          }\n          onChange?.(Array.from(set));\n        }}\n      />\n      <IndexMethod\n        label=\"Knowledge Graph Index\"\n        description=\"Extract the entities and relationships form the documents and use knowledge graphs to represent, enhance the logic and reasoning capabilities of the retrieval process.\"\n        checked={value?.includes('knowledge_graph') ?? false}\n        onCheckedChange={checked => {\n          const set = new Set(value);\n          if (checked) {\n            // graph index requires vector index.\n            set.add('vector');\n            set.add('knowledge_graph');\n          } else {\n            set.delete('knowledge_graph');\n          }\n          onChange?.(Array.from(set));\n        }}\n      />\n    </div>\n  );\n});\n\nFormIndexMethods.displayName = 'FormIndexMethods';\n\nfunction IndexMethod ({ disabled, label, description, checked, onCheckedChange }: { disabled?: boolean, label: ReactNode, description: ReactNode, checked: boolean, onCheckedChange: (value: boolean) => void }) {\n  const id = useId();\n\n  return (\n    <div className=\"flex flex-row items-center justify-between rounded-lg border p-4\">\n      <div className=\"space-y-0.5\">\n        <Label id={`${id}-label`} className=\"text-base\" htmlFor={id}>\n          {label}\n        </Label>\n        <p id={`${id}-description`} className=\"text-sm text-muted-foreground\">\n          {description}\n        </p>\n      </div>\n      <Switch\n        id={id}\n        disabled={disabled}\n        checked={checked}\n        onCheckedChange={onCheckedChange}\n        aria-labelledby={`${id}-label`}\n        aria-describedby={`${id}-description`}\n      />\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/hooks.ts",
    "content": "import { listDataSources } from '@/api/datasources';\nimport { getKnowledgeGraphIndexProgress, listKnowledgeBases } from '@/api/knowledge-base';\nimport { listAllHelper } from '@/lib/request';\nimport useSWR, { mutate } from 'swr';\n\nexport function useKnowledgeBaseIndexProgress (id: number) {\n  const { data: progress, ...rest } = useSWR(`api.knowledge-base.${id}.index-progress`, () => getKnowledgeGraphIndexProgress(id));\n  return { progress, ...rest };\n}\n\nexport function useAllKnowledgeBases (flag = true) {\n  return useSWR(flag && `api.knowledge-bases.list-all`, () => listAllHelper(listKnowledgeBases, 'id'));\n}\n\nexport function useKnowledgeBase (id: number | null | undefined) {\n  const { data, mutate, ...rest } = useAllKnowledgeBases(id != null);\n\n  return {\n    knowledgeBase: data?.find(llm => llm.id === id),\n    ...rest,\n  };\n}\n\nexport function useAllKnowledgeBaseDataSources (kbId: number, flag = true) {\n  return useSWR(flag && `api.knowledge-bases.${kbId}.data-sources.list-all`, () => listAllHelper((params) => listDataSources(kbId, params), 'id'));\n}\n\nexport function mutateKnowledgeBases () {\n  return mutate(key => {\n    if (typeof key === 'string') {\n      return key.startsWith(`api.knowledge-bases.`);\n    }\n    return false;\n  });\n}\n\nexport function mutateKnowledgeBaseDataSources (kbId: number) {\n  return mutate(`api.knowledge-bases.${kbId}.data-sources.list-all`);\n}"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/knowledge-base-card.stories.tsx",
    "content": "import { KnowledgeBaseCard, KnowledgeBaseCardPlaceholder } from '@/components/knowledge-base/knowledge-base-card';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport { AppRouterContext } from 'next/dist/shared/lib/app-router-context.shared-runtime';\nimport type { FC } from 'react';\n\nconst meta = {\n  title: 'Components/KnowledgeBase/KnowledgeBaseCard',\n  component: KnowledgeBaseCard,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ minWidth: 250 }}>\n        <AppRouterContext.Provider value={{} as any}>\n          <Story />\n        </AppRouterContext.Provider>\n      </div>\n    ),\n  ],\n  argTypes: {},\n  args: {},\n  subcomponents: {\n    KnowledgeBaseCardPlaceholder: KnowledgeBaseCardPlaceholder as FC<any>,\n  },\n} satisfies Meta<typeof KnowledgeBaseCard>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Default: Story = {\n  args: {\n    knowledgeBase: {\n      id: 1,\n      name: 'Some KB',\n      description: 'Some Description',\n      index_methods: ['vector', 'knowledge_graph'],\n      creator: { id: 'xxx' },\n      created_at: new Date(),\n      updated_at: new Date(),\n    },\n  },\n};\n\nexport const Placeholder: StoryObj<typeof KnowledgeBaseCardPlaceholder> = {\n  render () {\n    return <KnowledgeBaseCardPlaceholder />;\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/knowledge-base-card.tsx",
    "content": "import { deleteKnowledgeBase, getKnowledgeBaseLinkedChatEngines, type KnowledgeBaseSummary } from '@/api/knowledge-base';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { mutateKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Badge } from '@/components/ui/badge';\nimport { Button } from '@/components/ui/button';\nimport { Card, CardContent, CardDescription, CardFooter, CardHeader } from '@/components/ui/card';\nimport { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuSeparator, DropdownMenuTrigger } from '@/components/ui/dropdown-menu';\nimport { Separator } from '@/components/ui/separator';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { cn } from '@/lib/utils';\nimport { AlertTriangleIcon, Book, Ellipsis, TriangleAlertIcon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\nimport { ReactNode, startTransition, useState } from 'react';\nimport useSWR from 'swr';\n\nexport function KnowledgeBaseCard ({ knowledgeBase, children }: { knowledgeBase: KnowledgeBaseSummary, children?: ReactNode }) {\n  const router = useRouter();\n  const [dropdownOpen, setDropdownOpen] = useState(false);\n  const { data: linkedChatEngines } = useSWR(`api.knowledge-bases.${knowledgeBase.id}.linked-chat-engines`, () => getKnowledgeBaseLinkedChatEngines(knowledgeBase.id));\n\n  const handleCardClick = () => {\n    startTransition(() => {\n      router.push(`/knowledge-bases/${knowledgeBase.id}`);\n    });\n  };\n\n  const handleMenuItemSettingSelect = (event: Event) => {\n    event.preventDefault();\n    startTransition(() => {\n      router.push(`/knowledge-bases/${knowledgeBase.id}/settings`);\n    });\n  };\n\n  const handleDelete = async () => {\n    await deleteKnowledgeBase(knowledgeBase.id);\n    await mutateKnowledgeBases();\n    setDropdownOpen(false);\n  };\n\n  return (\n    <Card className={cn('cursor-pointer transition-colors hover:bg-muted/50 max-h-64', dropdownOpen && 'bg-muted/50')} onClick={handleCardClick}>\n      <CardHeader className=\"p-4\">\n        <div className=\"flex justify-start space-x-4\">\n          <div className=\"flex border w-10 h-10 rounded-md justify-center items-center bg-secondary\">\n            <Book className=\"size-5\" />\n          </div>\n          <div className=\"flex-1 space-y-1\">\n            <h4 className=\"text-sm font-semibold\">{knowledgeBase.name}</h4>\n            <div className=\"flex items-center text-xs text-muted-foreground\">\n              <span>{knowledgeBase.documents_total ?? 0} documents</span>\n              <span className=\"shrink-0 mx-0.5 px-1\">·</span>\n              <span>{(knowledgeBase.data_sources_total ?? 0) || <><AlertTriangleIcon className=\"size-3 inline-flex\" /> No</>} data sources</span>\n            </div>\n            <div className=\"flex items-center text-xs text-muted-foreground\">\n              <span>{linkedChatEngines?.length ?? <Skeleton className=\"inline-flex h-3 w-6 rounded\" />} linked chat engines</span>\n            </div>\n          </div>\n        </div>\n      </CardHeader>\n      <CardContent>\n        <CardDescription className=\"text-xs line-clamp-2 text-muted-foreground\">\n          {knowledgeBase.description}\n        </CardDescription>\n      </CardContent>\n      <CardFooter className=\"flex justify-between items-center text-sm p-2\">\n        <div className=\"flex items-center gap-2 pl-2\">\n          {knowledgeBase.index_methods.map(m => <Badge key={m} variant=\"secondary\">{m}</Badge>)}\n        </div>\n        <div>\n          <Separator orientation=\"vertical\" />\n          <DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>\n            <DropdownMenuTrigger asChild>\n              <Button variant=\"ghost\" size=\"sm\" onClick={event => event.stopPropagation()}>\n                <Ellipsis className=\"size-5\" />\n              </Button>\n            </DropdownMenuTrigger>\n            <DropdownMenuContent className=\"w-56\" align=\"end\" alignOffset={-9} onClick={event => event.stopPropagation()}>\n              <DropdownMenuItem onSelect={handleMenuItemSettingSelect}>Settings</DropdownMenuItem>\n              <DropdownMenuSeparator />\n              <DangerousActionButton\n                action={handleDelete}\n                asChild\n                actionDisabled={(linkedChatEngines?.length ?? 0) > 0}\n                actionDisabledReason={<Alert variant=\"warning\">\n                  <TriangleAlertIcon />\n                  <AlertTitle>Cannot delete this Knowledge Base</AlertTitle>\n                  <AlertDescription>This Knowledge Base was linked to at least one Chat Engine(s). Please unlink all Chat Engines to continue.</AlertDescription>\n                </Alert>}\n              >\n                <DropdownMenuItem\n                  className=\"text-destructive focus:text-destructive focus:bg-destructive/10\"\n                  disabled={linkedChatEngines == null}\n                  onSelect={event => event.preventDefault()}\n                >\n                  Delete\n                </DropdownMenuItem>\n              </DangerousActionButton>\n            </DropdownMenuContent>\n          </DropdownMenu>\n        </div>\n      </CardFooter>\n    </Card>\n  );\n}\n\nexport function KnowledgeBaseCardPlaceholder () {\n  return (\n    <Card className=\"max-h-64\">\n      <CardHeader className=\"p-4\">\n        <div className=\"flex justify-start space-x-4\">\n          <Skeleton className=\"size-10\" />\n          <div className=\"flex-1 space-y-1\">\n            <h4 className=\"text-sm font-semibold\"><Skeleton className=\"w-28 h-[1em] mt-[0.25em] mb-[0.5em]\" /></h4>\n            <div className=\"flex items-center text-xs text-muted-foreground gap-2\">\n              <Skeleton className=\"w-16 h-[1em]\" />\n              <Skeleton className=\"w-24 h-[1em]\" />\n            </div>\n          </div>\n        </div>\n      </CardHeader>\n      <CardContent>\n        <div className=\"text-muted-foreground text-xs line-clamp-2\">\n          <Skeleton className=\"w-full h-[1em] my-[0.25em]\" />\n          <Skeleton className=\"w-[70%] h-[1em] my-[0.25em]\" />\n        </div>\n      </CardContent>\n      <CardFooter className=\"flex items-center text-sm p-2\">\n        <div className=\"flex items-center gap-2 pl-2\">\n          <Skeleton className=\"rounded-full w-16 h-[1.25em]\" />\n          <Skeleton className=\"rounded-full w-24 h-[1.25em]\" />\n        </div>\n      </CardFooter>\n    </Card>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/knowledge-base-chunking-config-fields.tsx",
    "content": "import { type KnowledgeBase, type KnowledgeBaseChunkingConfig, type KnowledgeBaseChunkingConfigAdvanced, type KnowledgeBaseChunkingConfigGeneral, knowledgeBaseChunkingConfigSchema, type KnowledgeBaseChunkingMarkdownSplitterConfig, type KnowledgeBaseChunkingSentenceSplitterConfig, type KnowledgeBaseChunkingSplitterRule } from '@/api/knowledge-base';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { createAccessorHelper, GeneralSettingsField } from '@/components/settings-form';\nimport { FormField, FormItem, FormLabel } from '@/components/ui/form.beta';\nimport { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';\nimport { ToggleGroup, ToggleGroupItem } from '@/components/ui/toggle-group';\nimport { cn } from '@/lib/utils';\nimport { cloneElement, type ReactElement } from 'react';\n\nconst helper = createAccessorHelper<KnowledgeBase>();\n\nexport function KnowledgeBaseChunkingConfigFields () {\n  return (\n    <GeneralSettingsField\n      accessor={helper.field('chunking_config', defaultConfig)}\n      schema={knowledgeBaseChunkingConfigSchema}\n    >\n      <ModeSwitch />\n    </GeneralSettingsField>\n  );\n}\n\nconst fieldLayout = formFieldLayout<{ value: KnowledgeBaseChunkingConfigGeneral }>();\nconst advancedFieldLayout = formFieldLayout<{ value: KnowledgeBaseChunkingConfigAdvanced }>();\n\nfunction ModeSwitch () {\n  return (\n    <FormField<{ value: KnowledgeBase['chunking_config'] }, 'value'>\n      name=\"value\"\n      render={(field, form) => <>\n        <FormItem>\n          <FormLabel>\n            Chunking Mode\n          </FormLabel>\n          <ToggleGroup\n            className=\"w-full flex items-center\"\n            type=\"single\"\n            value={field.state.value?.mode ?? undefined}\n            onValueChange={(value => {\n              field.setValue(switchMode(value as never));\n            })}\n            onBlur={field.handleBlur}\n          >\n            <ToggleGroupItem className=\"flex-1 border block text-left font-normal h-auto py-4 opacity-50 data-[state=on]:opacity-100 hover:opacity-100 hover:bg-transparent hover:text-foreground transition-all\" value=\"general\">\n              <div className=\"font-semibold\">\n                General\n              </div>\n              <p className=\"text-muted-foreground text-xs\">Use best practices to process different types of documents</p>\n            </ToggleGroupItem>\n            <ToggleGroupItem className=\"flex-1 border block text-left font-normal h-auto py-4 opacity-50 data-[state=on]:opacity-100 hover:opacity-100 hover:bg-transparent hover:text-foreground transition-all\" value=\"advanced\">\n              <div className=\"font-semibold\">\n                Advanced\n              </div>\n              <p className=\"text-muted-foreground text-xs\">Customize the process for different file types by rules</p>\n            </ToggleGroupItem>\n          </ToggleGroup>\n          <div className=\"pl-4 border-l-4\">\n            {form.state.values.value?.mode === 'general' && <GeneralChunkingConfig />}\n            {form.state.values.value?.mode === 'advanced' && <AdvancedChunkingConfig />}\n          </div>\n        </FormItem>\n      </>}\n    />\n  );\n}\n\nfunction GeneralChunkingConfig () {\n  return (\n    <div className=\"grid md:grid-cols-3 gap-4\">\n      <fieldLayout.Basic name=\"value.chunk_size\" label=\"Chunk Size\">\n        <FormInputLayout suffix=\"tokens\">\n          <FormInput type=\"number\" />\n        </FormInputLayout>\n      </fieldLayout.Basic>\n      <fieldLayout.Basic name=\"value.chunk_overlap\" label=\"Chunk Overlap\">\n        <FormInputLayout suffix=\"tokens\">\n          <FormInput type=\"number\" />\n        </FormInputLayout>\n      </fieldLayout.Basic>\n      <fieldLayout.Basic name=\"value.paragraph_separator\" label=\"Paragraph Separator\">\n        <FormInput />\n      </fieldLayout.Basic>\n    </div>\n  );\n}\n\nfunction AdvancedChunkingConfig () {\n  return (\n    <div className=\"space-y-4\">\n      <div className=\"space-y-2\">\n        <div className=\"text-sm font-medium text-muted-foreground\">Plain Text (text/plain)</div>\n        <SplitterRuleConfig rule=\"text/plain\" />\n      </div>\n      <div className=\"space-y-2\">\n        <div className=\"text-sm font-medium text-muted-foreground\">Markdown (text/markdown)</div>\n        <SplitterRuleConfig rule=\"text/markdown\" />\n      </div>\n    </div>\n  );\n}\n\nfunction SplitterRuleConfig ({ rule }: { rule: keyof KnowledgeBaseChunkingConfigAdvanced['rules'] }) {\n  const name = `value.rules.${rule}` as const;\n  return (\n    <div className=\"space-y-4\">\n      <FormField<{ value: KnowledgeBaseChunkingConfigAdvanced }, typeof name>\n        name={name}\n        render={(field, form) => (\n          <>\n            <Select\n              name={name}\n              value={field.state.value.splitter}\n              onValueChange={value => {\n                field.setValue(({\n                  splitter: value,\n                  splitter_config: switchSplitter(value as never),\n                } as KnowledgeBaseChunkingSplitterRule));\n              }}\n            >\n              <SelectTrigger>\n                <SelectValue />\n              </SelectTrigger>\n              <SelectContent>\n                <SelectItem value=\"SentenceSplitter\">SentenceSplitter</SelectItem>\n                <SelectItem value=\"MarkdownSplitter\">MarkdownSplitter</SelectItem>\n              </SelectContent>\n            </Select>\n\n            {field.state.value.splitter === 'SentenceSplitter' && (\n              <div className=\"pl-4 grid grid-cols-3 gap-4\">\n                <advancedFieldLayout.Basic name={`value.rules.${rule}.splitter_config.chunk_size`} label=\"Chunk Size\">\n                  <FormInputLayout suffix=\"tokens\">\n                    <FormInput type=\"number\" min={1} step={1} />\n                  </FormInputLayout>\n                </advancedFieldLayout.Basic>\n                <advancedFieldLayout.Basic name={`value.rules.${rule}.splitter_config.chunk_overlap`} label=\"Chunk Overlap\">\n                  <FormInputLayout suffix=\"tokens\">\n                    <FormInput type=\"number\" min={0} step={1} />\n                  </FormInputLayout>\n                </advancedFieldLayout.Basic>\n                <advancedFieldLayout.Basic name={`value.rules.${rule}.splitter_config.paragraph_separator`} label=\"Paragraph Separator\">\n                  <FormInput />\n                </advancedFieldLayout.Basic>\n              </div>\n            )}\n            {field.state.value.splitter === 'MarkdownSplitter' && (\n              <div className=\"pl-4 grid grid-cols-3 gap-4\">\n                <advancedFieldLayout.Basic name={`value.rules.${rule}.splitter_config.chunk_size`} label=\"Chunk Size\">\n                  <FormInputLayout suffix=\"tokens\">\n                    <FormInput type=\"number\" min={1} step={1} />\n                  </FormInputLayout>\n                </advancedFieldLayout.Basic>\n                <advancedFieldLayout.Basic name={`value.rules.${rule}.splitter_config.chunk_header_level`} label=\"Chunk Header Level\">\n                  <FormInput type=\"number\" min={1} max={6} step={1} />\n                </advancedFieldLayout.Basic>\n              </div>\n            )}\n          </>\n        )}\n      />\n    </div>\n  );\n}\n\nfunction FormInputLayout ({ suffix, children, ...props }: { suffix: string, children: ReactElement }) {\n  return (\n    <div className=\"relative\">\n      {cloneElement(children, {\n        className: cn((props as any).className, 'pr-14'),\n        ...props,\n      } as any)}\n      <span className=\"absolute h-full top-0 right-1 flex items-center px-2 text-muted-foreground text-xs font-medium select-none\">\n        {suffix}\n      </span>\n    </div>\n  );\n}\n\nfunction switchMode (mode: KnowledgeBaseChunkingConfig['mode']): KnowledgeBaseChunkingConfig {\n  switch (mode) {\n    case 'general':\n      return {\n        mode: 'general',\n        ...switchSplitter('SentenceSplitter'),\n      };\n    case 'advanced': {\n      return {\n        mode: 'advanced',\n        rules: {\n          'text/plain': {\n            splitter: 'SentenceSplitter',\n            splitter_config: switchSplitter('SentenceSplitter'),\n          },\n          'text/markdown': {\n            splitter: 'MarkdownSplitter',\n            splitter_config: switchSplitter('MarkdownSplitter'),\n          },\n        },\n      };\n    }\n  }\n}\n\nfunction switchSplitter (splitter: 'SentenceSplitter'): KnowledgeBaseChunkingSentenceSplitterConfig;\nfunction switchSplitter (splitter: 'MarkdownSplitter'): KnowledgeBaseChunkingMarkdownSplitterConfig;\nfunction switchSplitter (splitter: 'SentenceSplitter' | 'MarkdownSplitter') {\n  switch (splitter) {\n    case 'SentenceSplitter':\n      return {\n        chunk_size: 1024,\n        chunk_overlap: 200,\n        paragraph_separator: '\\\\n\\\\n',\n      } satisfies KnowledgeBaseChunkingSentenceSplitterConfig;\n    case 'MarkdownSplitter':\n      return {\n        chunk_size: 1200,\n        chunk_header_level: 2,\n      } satisfies KnowledgeBaseChunkingMarkdownSplitterConfig;\n  }\n}\n\nconst defaultConfig = switchMode('general');\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/knowledge-base-index.tsx",
    "content": "'use client';\n\nimport { actions } from '@/components/cells/actions';\nimport { type DatasourceKgIndexError, type DatasourceVectorIndexError } from '@/api/datasources';\nimport { listKnowledgeBaseKgIndexErrors, listKnowledgeBaseVectorIndexErrors, rebuildKBDocumentIndex, retryKnowledgeBaseAllFailedTasks } from '@/api/knowledge-base';\nimport { errorMessageCell } from '@/components/cells/error-message';\nimport { link } from '@/components/cells/link';\nimport { IndexProgressChart, IndexProgressChartPlaceholder } from '@/components/charts/IndexProgressChart';\nimport { TotalCard } from '@/components/charts/TotalCard';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { useKnowledgeBaseIndexProgress } from '@/components/knowledge-base/hooks';\nimport { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { ArrowRightIcon, DownloadIcon, FileTextIcon, PuzzleIcon, RouteIcon, WrenchIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { toast } from 'sonner';\nimport { getErrorMessage } from '@/lib/errors';\n\nexport function KnowledgeBaseIndexProgress ({ id }: { id: number }) {\n  const { progress, isLoading } = useKnowledgeBaseIndexProgress(id);\n\n  return (\n    <>\n      <div className=\"grid sm:grid-cols-2 md:grid-cols-4 gap-4\">\n        <TotalCard\n          title=\"Documents\"\n          icon={<FileTextIcon className=\"h-4 w-4 text-muted-foreground\" />}\n          total={progress?.documents.total}\n          isLoading={isLoading}\n        >\n          <Link className=\"flex gap-2 items-center\" href={`/knowledge-bases/${id}`}>All documents <ArrowRightIcon className=\"size-3\" /></Link>\n        </TotalCard>\n        <TotalCard\n          title=\"Chunks\"\n          icon={<PuzzleIcon 
className=\"h-4 w-4 text-muted-foreground\" />}\n          total={progress?.chunks.total}\n          isLoading={isLoading}\n        />\n        <TotalCard\n          title=\"Entities\"\n          icon={<RouteIcon className=\"h-4 w-4 text-muted-foreground\" />}\n          total={progress?.entities?.total || null}\n          isLoading={isLoading}\n        />\n        <TotalCard\n          title=\"Relationships\"\n          icon={<RouteIcon className=\"h-4 w-4 text-muted-foreground\" />}\n          total={progress?.relationships?.total || null}\n          isLoading={isLoading}\n        />\n      </div>\n      <div className=\"mt-4 grid grid-cols-2 gap-4\">\n        {progress ? <IndexProgressChart title=\"Vector Index\" data={progress.vector_index} label=\"Total Documents\" /> : <IndexProgressChartPlaceholder title=\"Vector Index\" label=\"Total Documents\" />}\n        {progress?.kg_index ? <IndexProgressChart title=\"Knowledge Graph Index\" data={progress.kg_index} label=\"Total Chunks\" /> : <IndexProgressChartPlaceholder title=\"Knowledge Graph Index\" label=\"Total Chunks\" />}\n      </div>\n      <KnowledgeBaseIndexErrors id={id} />\n    </>\n  );\n}\n\nexport function KnowledgeBaseIndexErrors ({ id }: { id: number }) {\n  const { progress, mutate } = useKnowledgeBaseIndexProgress(id);\n\n  if (!progress) {\n    return null;\n  }\n  const showVectorIndexErrors = !!progress.vector_index.failed;\n  const showKgIndexErrors = !!progress.kg_index?.failed;\n\n  if (!showVectorIndexErrors && !showKgIndexErrors) {\n    return null;\n  }\n\n  return (\n    <section className=\"space-y-4\">\n      <h3>Failed Tasks</h3>\n      <Tabs defaultValue={showVectorIndexErrors ? 
'vector-index-errors' : 'kg-index-errors'}>\n        <div className=\"flex items-center\">\n          <TabsList>\n            <TabsTrigger value=\"vector-index-errors\">\n              Vector Index\n            </TabsTrigger>\n            <TabsTrigger value=\"kg-index-errors\">\n              KnowledgeGraph Index\n            </TabsTrigger>\n          </TabsList>\n          <DangerousActionButton\n            className=\"ml-auto\"\n            action={async () => {\n              await retryKnowledgeBaseAllFailedTasks(id);\n              await mutate(undefined, { revalidate: true });\n            }}\n            dialogTitle=\"Retry failed tasks\"\n            dialogDescription=\"Are you sure to retry all failed tasks?\"\n          >\n            Retry failed tasks\n          </DangerousActionButton>\n\n        </div>\n        <TabsContent value=\"vector-index-errors\">\n          <KBVectorIndexErrorsTable kb_id={id} />\n        </TabsContent>\n        <TabsContent value=\"kg-index-errors\">\n          <KBKGIndexErrorsTable kb_id={id} />\n        </TabsContent>\n      </Tabs>\n    </section>\n  );\n}\n\nfunction KBVectorIndexErrorsTable ({ kb_id }: { kb_id: number }) {\n  return (\n    <DataTableRemote<DatasourceVectorIndexError, any>\n      api={(params) => listKnowledgeBaseVectorIndexErrors(kb_id, params)}\n      apiKey={`datasources.${kb_id}.vector-index-errors`}\n      columns={getVectorIndexErrorsColumns(kb_id)}\n      idColumn=\"document_id\"\n    />\n  );\n}\n\nfunction KBKGIndexErrorsTable ({ kb_id }: { kb_id: number }) {\n  return (\n    <DataTableRemote<DatasourceKgIndexError, any>\n      api={(params) => listKnowledgeBaseKgIndexErrors(kb_id, params)}\n      apiKey={`datasources.${kb_id}.kg-index-errors`}\n      columns={getKgIndexErrorsColumns(kb_id)}\n      idColumn=\"chunk_id\"\n    />\n  );\n}\n\nconst vectorIndexErrorsHelper = createColumnHelper<DatasourceVectorIndexError>();\nconst getVectorIndexErrorsColumns = (kb_id: number): 
ColumnDef<DatasourceVectorIndexError, any>[] => {\n  return [\n    vectorIndexErrorsHelper.display({\n      header: 'Document', cell: ({ row }) => (\n        <>\n          {row.original.document_name}\n          {' '}\n          <span className=\"text-muted-foreground\">#{row.original.document_id}</span>\n        </>\n      ),\n    }),\n    vectorIndexErrorsHelper.accessor('source_uri', {\n      header: 'Source URI',\n      cell: link({ icon: <DownloadIcon className=\"size-3\" />, truncate: true })\n    }),\n    vectorIndexErrorsHelper.accessor('error', {\n      header: 'Error message',\n      cell: errorMessageCell(),\n    }),\n    vectorIndexErrorsHelper.display({\n      id: 'op',\n      cell: actions(row => [\n        {\n          type: 'label',\n          title: 'Actions',\n        },\n        {\n          key: 'rebuild-index',\n          title: 'Rebuild Index',\n          icon: <WrenchIcon className=\"size-3\" />,\n          action: async (context) => {\n            try {\n              await rebuildKBDocumentIndex(kb_id, row.document_id);\n              context.table.reload?.();\n              context.startTransition(() => {\n                context.router.refresh();\n              });\n              context.setDropdownOpen(false);\n              toast.success(`Successfully rebuild index for document \"${row.document_name}\"`);\n            } catch (e) {\n              toast.error(`Failed to rebuild index for document \"${row.document_name}\"`, {\n                description: getErrorMessage(e),\n              });\n              return Promise.reject(e);\n            }\n          },\n        },\n      ]),\n    }),\n  ]\n};\n\nconst kgIndexErrorsHelper = createColumnHelper<DatasourceKgIndexError>();\nconst getKgIndexErrorsColumns = (kb_id: number): ColumnDef<DatasourceKgIndexError, any>[] => {\n  return [\n    kgIndexErrorsHelper.display({\n      header: 'Document',\n      cell: ({ row }) => (\n      <>\n        {row.original.document_name}\n        {' '}\n    
    <span className=\"text-muted-foreground\">#{row.original.document_id}</span>\n      </>\n    ),\n    }),\n    kgIndexErrorsHelper.accessor('source_uri', {\n      header: 'Source URI',\n      cell: link({ icon: <DownloadIcon className=\"size-3\" />, truncate: true })\n    }),\n    kgIndexErrorsHelper.accessor('chunk_id', { header: 'Chunk ID' }),\n    kgIndexErrorsHelper.accessor('error', {\n      header: 'Error message',\n      cell: errorMessageCell(),\n    }),\n    kgIndexErrorsHelper.display({\n      id: 'op',\n      cell: actions(row => [\n        {\n          type: 'label',\n          title: 'Actions',\n        },\n        {\n          key: 'rebuild-index',\n          title: 'Rebuild Index',\n          icon: <WrenchIcon className=\"size-3\" />,\n          action: async (context) => {\n            try {\n              await rebuildKBDocumentIndex(kb_id, row.document_id);\n              context.table.reload?.();\n              context.startTransition(() => {\n                context.router.refresh();\n              });\n              context.setDropdownOpen(false);\n              toast.success(`Successfully rebuild knowledge graph index for document \"${row.document_name}\"`);\n            } catch (e) {\n              toast.error(`Failed to rebuild knowledge graph index for document \"${row.document_name}\"`, {\n                description: getErrorMessage(e),\n              });\n              return Promise.reject(e);\n            }\n          },\n        },\n      ]),\n    }),\n  ]\n};\n"
  },
  {
    "path": "frontend/app/src/components/knowledge-base/knowledge-base-settings-form.tsx",
    "content": "'use client';\n\nimport { type KnowledgeBase, type KnowledgeBaseIndexMethod, updateKnowledgeBase } from '@/api/knowledge-base';\nimport { EmbeddingModelSelect, LLMSelect } from '@/components/form/biz';\nimport { FormInput, FormSwitch, FormTextarea } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { mutateKnowledgeBases } from '@/components/knowledge-base/hooks';\nimport { KnowledgeBaseChunkingConfigFields } from '@/components/knowledge-base/knowledge-base-chunking-config-fields';\nimport { fieldAccessor, type GeneralSettingsFieldAccessor, GeneralSettingsForm, shallowPick } from '@/components/settings-form';\nimport { GeneralSettingsField as GeneralSettingsField } from '@/components/settings-form/GeneralSettingsField';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { format } from 'date-fns';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nconst field = formFieldLayout<{ value: any }>();\n\nexport function KnowledgeBaseSettingsForm ({ knowledgeBase }: { knowledgeBase: KnowledgeBase }) {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  return (\n    <GeneralSettingsForm\n      data={knowledgeBase}\n      readonly={false}\n      loading={transitioning}\n      onUpdate={async (data, path) => {\n        if (['name', 'description', 'chunking_config'].includes(path[0] as never)) {\n          const partial = shallowPick(data, path as never);\n          await updateKnowledgeBase(knowledgeBase.id, partial);\n          startTransition(() => {\n            router.refresh();\n            mutateKnowledgeBases();\n          });\n        } else {\n          throw new Error(`${path.map(p => String(p)).join('.')} is not updatable currently.`);\n        }\n      }}>\n      <GeneralSettingsField schema={nameSchema} accessor={nameAccessor}>\n        <field.Basic 
name=\"value\" label=\"Name\">\n          <FormInput />\n        </field.Basic>\n      </GeneralSettingsField>\n      <GeneralSettingsField schema={descriptionSchema} accessor={descriptionAccessor}>\n        <field.Basic name=\"value\" label=\"Description\">\n          <FormTextarea />\n        </field.Basic>\n      </GeneralSettingsField>\n      <GeneralSettingsField readonly schema={llmSchema} accessor={llmAccessor}>\n        <field.Basic name=\"value\" label=\"LLM\">\n          <LLMSelect />\n        </field.Basic>\n      </GeneralSettingsField>\n      <GeneralSettingsField readonly schema={embeddingModelSchema} accessor={embeddingModelAccessor}>\n        <field.Basic name=\"value\" label=\"Embedding Model\">\n          <EmbeddingModelSelect />\n        </field.Basic>\n      </GeneralSettingsField>\n      <div className=\"space-y-2\">\n        <div className=\"text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70\">Index Methods</div>\n        <div className=\"space-y-2 pt-2\">\n          <GeneralSettingsField readonly accessor={vectorAccessor} schema={vectorSchema}>\n            <field.Contained name=\"value\" label=\"Vector Index\" description=\"Use vector embedding to represent documents so that relevant documents can be retrieved based on their semantics\">\n              <FormSwitch />\n            </field.Contained>\n          </GeneralSettingsField>\n          <GeneralSettingsField readonly accessor={kgAccessor} schema={kgSchema}>\n            <field.Contained name=\"value\" label=\"Knowledge Graph Index\" description=\"Extract the entities and relationships form the documents and use knowledge graphs to represent, enhance the logic and reasoning capabilities of the retrieval process\">\n              <FormSwitch />\n            </field.Contained>\n          </GeneralSettingsField>\n        </div>\n      </div>\n      <KnowledgeBaseChunkingConfigFields />\n      <GeneralSettingsField readonly schema={createdAtSchema} 
accessor={createdAtAccessor}>\n        <field.Basic name=\"value\" label=\"Created At\">\n          <FormInput />\n        </field.Basic>\n      </GeneralSettingsField>\n      <GeneralSettingsField readonly schema={updatedAtSchema} accessor={updatedAtAccessor}>\n        <field.Basic name=\"value\" label=\"Updated At\">\n          <FormInput />\n        </field.Basic>\n      </GeneralSettingsField>\n    </GeneralSettingsForm>\n  );\n}\n\nconst getIndexMethodAccessor = (method: KnowledgeBaseIndexMethod): GeneralSettingsFieldAccessor<KnowledgeBase, boolean> => ({\n  path: ['index_methods'],\n  get: data => data.index_methods.includes(method),\n  set: (data, value) => {\n    if (value) {\n      return {\n        ...data,\n        index_methods: Array.from(new Set(data.index_methods.concat(method))),\n      };\n    } else {\n      return {\n        ...data,\n        index_methods: data.index_methods.filter(m => m !== method),\n      };\n    }\n  },\n});\nconst getDatetimeAccessor = (key: KeyOfType<KnowledgeBase, Date>): GeneralSettingsFieldAccessor<KnowledgeBase, string> => {\n  return {\n    path: [key],\n    get (data) {\n      return format(data[key], 'yyyy-MM-dd HH:mm:ss');\n    },\n    set () {\n      throw new Error(`update ${key} is not supported`);\n    },\n  };\n};\n\nconst nameSchema = z.string();\nconst nameAccessor = fieldAccessor<KnowledgeBase, 'name'>('name');\n\nconst descriptionSchema = z.string();\nconst descriptionAccessor = fieldAccessor<KnowledgeBase, 'description'>('description');\n\nconst vectorSchema = z.boolean();\nconst vectorAccessor = getIndexMethodAccessor('vector');\n\nconst kgSchema = z.boolean();\nconst kgAccessor = getIndexMethodAccessor('knowledge_graph');\n\nconst llmSchema = z.number();\nconst llmAccessor: GeneralSettingsFieldAccessor<KnowledgeBase, number | undefined> = {\n  path: ['llm'],\n  get (data) {\n    return data.llm?.id;\n  },\n  set () {\n    throw new Error('TODO');\n  },\n};\n\nconst embeddingModelSchema = 
z.number();\nconst embeddingModelAccessor: GeneralSettingsFieldAccessor<KnowledgeBase, number | undefined> = {\n  path: ['embedding_model'],\n  get (data) {\n    return data.embedding_model?.id;\n  },\n  set () {\n    throw new Error('TODO');\n  },\n};\n\nconst createdAtSchema = z.string();\nconst createdAtAccessor = getDatetimeAccessor('created_at');\n\nconst updatedAtSchema = z.string();\nconst updatedAtAccessor = getDatetimeAccessor('updated_at');\n"
  },
  {
    "path": "frontend/app/src/components/llm/CreateLLMForm.tsx",
    "content": "'use client';\n\nimport { type CreateLLM, createLlm, type LLM, testLlm } from '@/api/llms';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { useLLMProviders } from '@/components/llm/hooks';\nimport { ProviderDescription } from '@/components/provider-description';\nimport { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';\nimport { Form, formDomEventHandlers, FormSubmit } from '@/components/ui/form.beta';\nimport { useModelProvider } from '@/hooks/use-model-provider';\nimport { zodJsonText } from '@/lib/zod';\nimport { useForm } from '@tanstack/react-form';\nimport { useId, useState } from 'react';\nimport { toast } from 'sonner';\nimport { z } from 'zod';\n\nconst unsetForm = z.object({\n  name: z.string().min(1, 'Must not empty'),\n  provider: z.string().min(1, 'Must not empty'),\n  config: zodJsonText().optional(),\n  is_default: z.boolean().optional(),\n});\n\nconst strCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: z.string().min(1, 'Must not empty'),\n});\n\nconst dictCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: zodJsonText(),\n});\n\nconst field = formFieldLayout<CreateLLM>();\n\nexport function CreateLLMForm ({ transitioning, onCreated }: { transitioning?: boolean, onCreated?: (llm: LLM) => void }) {\n  const id = useId();\n  const { data: options, isLoading, error } = useLLMProviders();\n  const [submissionError, setSubmissionError] = useState<unknown>();\n\n  const form = useForm<CreateLLM | Omit<CreateLLM, 'model' | 'credentials'>>({\n    validators: {\n      
onSubmit: unsetForm,\n    },\n    onSubmit (props) {\n      const { value } = props;\n      const provider = options?.find(option => option.provider === value.provider);\n\n      const schema = provider\n        ? provider.credentials_type === 'str'\n          ? strCredentialForm\n          : provider.credentials_type === 'dict'\n            ? dictCredentialForm\n            : unsetForm\n        : unsetForm;\n\n      return onSubmitHelper(schema, async (values) => {\n        const { error, success } = await testLlm(values as CreateLLM);\n        if (!success) {\n          throw new Error(error || 'Test LLM failed');\n        }\n        const llm = await createLlm(values as CreateLLM);\n        toast.success(`LLM ${llm.name} successfully created.`);\n        onCreated?.(llm);\n      }, setSubmissionError)(props);\n    },\n    defaultValues: {\n      name: '',\n      provider: '',\n      is_default: false,\n      config: '{}',\n    },\n  });\n\n  const provider = useModelProvider(form, options, 'default_llm_model');\n\n  return (\n    <>\n      <Form form={form} disabled={transitioning} submissionError={submissionError}>\n        <form id={id} className=\"space-y-4 max-w-screen-sm\" {...formDomEventHandlers(form, transitioning)}>\n          <field.Basic name=\"name\" label=\"Name\">\n            <FormInput />\n          </field.Basic>\n          <field.Basic name=\"provider\" label=\"Provider\" description={provider && <ProviderDescription provider={provider} />}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n          {provider && (\n            <>\n              <field.Basic name=\"model\" label=\"Model\" description={provider.llm_model_description}>\n                <FormInput />\n              </field.Basic>\n              <field.Basic name=\"credentials\" label={provider.credentials_display_name} description={provider.credentials_description}>\n                {provider.credentials_type === 
'str'\n                  ? <FormInput placeholder={provider.default_credentials} />\n                  : <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                }\n              </field.Basic>\n              <Accordion type=\"multiple\">\n                <AccordionItem value=\"advanced-settings\">\n                  <AccordionTrigger>\n                    Advanced Settings\n                  </AccordionTrigger>\n                  <AccordionContent className=\"px-4\">\n                    <field.Basic name=\"config\" label=\"Config\" description={provider.config_description}>\n                      <CodeInput language=\"json\" />\n                    </field.Basic>\n                  </AccordionContent>\n                </AccordionItem>\n              </Accordion>\n            </>\n          )}\n          <FormRootError title=\"Failed to create LLM\" />\n          <FormSubmit disabled={!options} transitioning={transitioning} form={id}>\n            Create LLM\n          </FormSubmit>\n        </form>\n      </Form>\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/llm/LLMsTable.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { deleteLlm, listLlms, type LLM } from '@/api/llms';\nimport { actions } from '@/components/cells/actions';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { Badge } from '@/components/ui/badge';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { TrashIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { toast } from 'sonner';\n\nexport function LLMsTable () {\n  return (\n    <DataTableRemote\n      columns={columns}\n      apiKey=\"api.llms.list\"\n      api={listLlms}\n      idColumn=\"id\"\n    />\n  );\n}\n\nconst helper = createColumnHelper<LLM>();\nconst columns: ColumnDef<LLM, any>[] = [\n  helper.accessor('id', {\n    header: 'ID',\n    cell: ({ row }) => row.original.id,\n  }),\n  helper.accessor('name', {\n    header: 'NAME',\n    cell: ({ row }) => {\n      const { id, name, is_default } = row.original;\n      return (\n        <Link className=\"flex gap-1 items-center underline\" href={`/llms/${id}`}>\n          {is_default && <Badge>default</Badge>}\n          {name}\n        </Link>\n      );\n    },\n  }),\n  helper.display({\n    header: 'PROVIDER / MODEL',\n    cell: ({ row }) => {\n      const { model, provider } = row.original;\n      return (\n        <>\n          <strong>{provider}</strong>/<span>{model}</span>\n        </>\n      );\n    },\n  }),\n  helper.display({\n    id: 'Operations',\n    header: 'ACTIONS',\n    cell: actions(row => ([\n      {\n        key: 'set-default',\n        title: 'Set Default',\n        disabled: row.is_default,\n        action: async (context) => {\n          try {\n            await setDefault('llms', row.id);\n            context.table.reload?.();\n            context.startTransition(() => {\n              context.router.refresh();\n            });\n           
 context.setDropdownOpen(false);\n            toast.success(`Successfully set default LLM to ${row.name}.`);\n          } catch (e) {\n            toast.error(`Failed to set default LLM to ${row.name}.`, {\n              description: getErrorMessage(e),\n            });\n            throw e;\n          }\n        },\n      },\n      {\n        key: 'delete',\n        action: async ({ table, setDropdownOpen }) => {\n          await deleteLlm(row.id);\n          table.reload?.();\n          setDropdownOpen(false);\n        },\n        title: 'Delete',\n        icon: <TrashIcon className=\"size-3\" />,\n        dangerous: {},\n      },\n    ])),\n  }),\n];\n"
  },
  {
    "path": "frontend/app/src/components/llm/LlmInfo.tsx",
    "content": "'use client';\n\nimport { useLlm } from '@/components/llm/hooks';\nimport { ModelComponentInfo } from '@/components/model-component-info';\n\nexport function LlmInfo ({ className, id }: { className?: string, id: number | undefined | null }) {\n  const { llm, isLoading } = useLlm(id);\n\n  return <ModelComponentInfo\n    className={className}\n    model={llm}\n    url={llm => `/llms/${llm.id}`}\n    isLoading={isLoading}\n    defaultName=\"Default LLM\"\n  />;\n}\n"
  },
  {
    "path": "frontend/app/src/components/llm/UpdateLLMForm.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { deleteLlm, type LLM, updateLlm, type UpdateLLM } from '@/api/llms';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput, FormSwitch } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { useLLMProviders } from '@/components/llm/hooks';\nimport { fieldAccessor, GeneralSettingsField, type GeneralSettingsFieldAccessor, GeneralSettingsForm } from '@/components/settings-form';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { zodJsonText } from '@/lib/zod';\nimport { format } from 'date-fns';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nexport function UpdateLlmForm ({ llm }: { llm: LLM }) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n  const { data: options, isLoading, error } = useLLMProviders();\n\n  const provider = options?.find(option => option.provider === llm.provider);\n\n  return (\n    <div className=\"max-w-screen-sm space-y-4\">\n      <GeneralSettingsForm<UpdateLLM>\n        data={llm}\n        readonly={false}\n        loading={transitioning}\n        onUpdate={async (data, path) => {\n          if (path[0] === 'is_default') {\n            await setDefault('llms', llm.id);\n          } else {\n            const key = path[0] as keyof UpdateLLM;\n            await updateLlm(llm.id, {\n              [key]: data[key],\n            });\n          }\n          startTransition(() => {\n            router.refresh();\n          });\n        }}\n      >\n        <GeneralSettingsField accessor={idAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"ID\" name=\"value\">\n            <FormInput />\n     
     </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={nameAccessor} schema={nameSchema}>\n          <field.Basic label=\"Name\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={providerAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Provider\" name=\"value\" description={provider?.provider_description}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={modelAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Model\" name=\"value\" description={provider?.llm_model_description}>\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        {provider && (\n          provider.credentials_type === 'str'\n            ? (\n              <GeneralSettingsField accessor={stringCredentialAccessor} schema={stringCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <FormInput placeholder={provider.default_credentials} />\n                </field.Basic>\n              </GeneralSettingsField>\n            ) : (\n              <GeneralSettingsField accessor={dictCredentialAccessor} schema={dictCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                </field.Basic>\n              </GeneralSettingsField>\n            )\n        )}\n        <GeneralSettingsField accessor={configAccessor} schema={configSchema}>\n          <field.Basic label=\"Config\" name=\"value\" description={provider?.config_description}>\n            <CodeInput 
language=\"json\" />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={isDefaultAccessor} schema={anySchema}>\n          <field.Contained label=\"Is Default\" name=\"value\">\n            <FormSwitch />\n          </field.Contained>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={createdAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Created At\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={updatedAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Updated At\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n      </GeneralSettingsForm>\n      <DangerousActionButton\n        variant=\"destructive\"\n        disabled={transitioning}\n        action={async () => {\n          await deleteLlm(llm.id);\n          startTransition(() => {\n            router.push('/llms');\n          });\n        }}\n      >\n        Delete\n      </DangerousActionButton>\n    </div>\n  );\n}\n\nconst field = formFieldLayout<{ value: any | any[] }>();\n\nconst anySchema = z.any();\n\nconst getDatetimeAccessor = (key: KeyOfType<LLM, Date | undefined | null>): GeneralSettingsFieldAccessor<LLM, string> => {\n  return {\n    path: [key],\n    get (data) {\n      return format(data[key], 'yyyy-MM-dd HH:mm:ss');\n    },\n    set () {\n      throw new Error(`update ${key} is not supported`);\n    },\n  };\n};\n\nconst nameSchema = z.string();\nconst configSchema = zodJsonText();\n\nconst nameAccessor = fieldAccessor<UpdateLLM, 'name'>('name');\nconst idAccessor = fieldAccessor<LLM, 'id'>('id');\nconst providerAccessor = fieldAccessor<LLM, 'provider'>('provider');\nconst modelAccessor = fieldAccessor<LLM, 'model'>('model');\nconst configAccessor: GeneralSettingsFieldAccessor<UpdateLLM, string> = {\n  path: ['config'],\n  get 
(data) {\n    return JSON.stringify(data.config, undefined, 2);\n  },\n  set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      config: value,\n    };\n  },\n};\nconst isDefaultAccessor = fieldAccessor<LLM, 'is_default'>('is_default');\nconst createdAtAccessor = getDatetimeAccessor('created_at');\nconst updatedAtAccessor = getDatetimeAccessor('updated_at');\n\nconst stringCredentialSchema = z.string().optional();\nconst dictCredentialSchema = zodJsonText();\n\nconst stringCredentialAccessor = fieldAccessor<UpdateLLM, 'credentials'>('credentials', '');\nconst dictCredentialAccessor: GeneralSettingsFieldAccessor<UpdateLLM, string> = {\n  path: ['credentials'],\n  get (data) {\n    return JSON.stringify(data.credentials, undefined, 2);\n  },\n  set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      credentials: value,\n    };\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/llm/hooks.ts",
    "content": "import { listLlmOptions, listLlms } from '@/api/llms';\nimport { listAllHelper } from '@/lib/request';\nimport useSWR from 'swr';\n\nexport function useAllLlms (flag: boolean = true) {\n  return useSWR(flag && 'api.llms.list-all', () => listAllHelper(listLlms, 'id'));\n}\n\nexport function useLlm (id: number | null | undefined) {\n  const { data, mutate, ...rest } = useAllLlms(id != null);\n\n  return {\n    llm: data?.find(llm => llm.id === id),\n    ...rest,\n  };\n}\n\nexport function useLLMProviders () {\n  return useSWR('api.llms.list-options', listLlmOptions);\n}"
  },
  {
    "path": "frontend/app/src/components/loader.tsx",
    "content": "'use client';\n\nimport { cn } from '@/lib/utils';\nimport { Loader2Icon, LoaderIcon } from 'lucide-react';\nimport { type ReactNode, useEffect, useState } from 'react';\n\nexport function Loader ({ loading, children = 'Loading data' }: { loading: boolean, children?: ReactNode }) {\n  const [mounted, setMounted] = useState(loading);\n\n  useEffect(() => {\n    if (!loading) {\n      const h = setTimeout(() => {\n        setMounted(loading);\n      }, 200);\n\n      return () => {\n        clearTimeout(h);\n      };\n    } else {\n      setMounted(true);\n    }\n  }, [loading]);\n\n  if (mounted || loading) {\n    return (\n      <div className={cn(\n        'rounded-md absolute z-10 bg-background/90 left-0 top-0 w-full h-full flex items-center justify-center transition-opacity duration-200 select-none',\n        loading ? 'opacity-100' : 'opacity-0',\n      )}>\n        <span className=\"flex gap-2 items-center\">\n          <Loader2Icon className=\"animate-spin\" />\n          <span>{children}</span>\n        </span>\n      </div>\n    );\n  } else {\n    return null;\n  }\n\n}"
  },
  {
    "path": "frontend/app/src/components/managed-dialog-close.tsx",
    "content": "import { useManagedDialog } from '@/components/managed-dialog';\nimport type { ReactNode } from 'react';\n\nexport function ManagedDialogClose ({ children }: { children: (close: () => void) => ReactNode }) {\n\n  const { setOpen } = useManagedDialog();\n\n  return children(() => {\n    setOpen(false);\n  });\n}\n"
  },
  {
    "path": "frontend/app/src/components/managed-dialog.tsx",
    "content": "'use client';\n\nimport { ManagedPanelContext } from '@/components/managed-panel';\nimport { Dialog } from '@/components/ui/dialog';\nimport { type ComponentProps, useState } from 'react';\n\nexport interface ManagedDialogProps extends Omit<ComponentProps<typeof Dialog>, 'open' | 'onOpenChange'> {\n}\n\nexport function ManagedDialog (props: ManagedDialogProps) {\n  const [open, setOpen] = useState(false);\n\n  return (\n    <ManagedPanelContext.Provider value={{ open, setOpen }}>\n      <Dialog open={open} onOpenChange={setOpen} {...props} />\n    </ManagedPanelContext.Provider>\n  );\n}\n\nexport { useManagedPanel as useManagedDialog } from './managed-panel';\n\n"
  },
  {
    "path": "frontend/app/src/components/managed-panel.tsx",
    "content": "'use client';\n\nimport { createContext, type Dispatch, type SetStateAction, useContext } from 'react';\n\nexport const ManagedPanelContext = createContext<{ open: boolean, setOpen: Dispatch<SetStateAction<boolean>> }>({\n  open: false,\n  setOpen: () => {},\n});\n\nexport function useManagedPanel () {\n  return useContext(ManagedPanelContext);\n}\n"
  },
  {
    "path": "frontend/app/src/components/model-component-info.tsx",
    "content": "import { cn } from '@/lib/utils';\nimport { Loader2Icon } from 'lucide-react';\nimport Link from 'next/link';\n\ntype ModelBase = {\n  provider?: string\n  id: number\n  name: string\n  model: string\n}\n\nexport interface ModelComponentInfoProps<Model extends ModelBase> {\n  className?: string;\n  isLoading?: boolean;\n  model: Model | null | undefined;\n  url: (model: Model) => string;\n  defaultName?: string;\n}\n\nexport function ModelComponentInfo<Model extends ModelBase> ({ className, isLoading = false, model, url, defaultName }: ModelComponentInfoProps<Model>) {\n  if (isLoading) {\n    return <Loader2Icon className={cn('size-4 animate-spin repeat-infinite', className)} />;\n  }\n\n  if (!model) {\n    return defaultName && <span className={cn('text-muted-foreground', className)}>{defaultName}</span>;\n  }\n\n  return (\n    <span className={cn('flex gap-1 items-center', className)}>\n      <Link className=\"font-bold underline\" href={url(model)} target=\"_blank\">{model.name}</Link>\n      <span className=\"text-muted-foreground\">\n        <strong>{model.provider}</strong>:{model.model}\n      </span>\n    </span>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/nextjs/NextLink.tsx",
    "content": "'use client';\n\nimport { type ButtonProps, buttonVariants } from '@/components/ui/button';\nimport { cn } from '@/lib/utils';\nimport Link, { type LinkProps } from 'next/link';\nimport { useRouter } from 'next/navigation';\nimport { forwardRef, MouseEvent, useTransition } from 'react';\n\nexport interface NextLinkProps extends Pick<LinkProps, 'prefetch' | 'scroll' | 'onClick' | 'replace'>, Pick<ButtonProps, 'className' | 'style' | 'variant' | 'size' | 'disabled' | 'children'> {\n  disabled?: boolean;\n  href: string;\n}\n\nexport const NextLink = forwardRef<HTMLAnchorElement, NextLinkProps>(({ className, disabled: propDisabled, onClick, href, replace, scroll, variant, size, ...props }, ref) => {\n  const [navigating, startTransition] = useTransition();\n  const router = useRouter();\n\n  const disabled = navigating || !!propDisabled;\n\n  const handleClick = (event: MouseEvent<HTMLAnchorElement>) => {\n    if (disabled) {\n      event.preventDefault();\n      return;\n    }\n    if (event.ctrlKey || event.shiftKey || event.metaKey || event.altKey) {\n      event.persist();\n      return;\n    }\n    onClick?.(event);\n    if (event.defaultPrevented) {\n      return;\n    }\n    event.preventDefault();\n\n    startTransition(() => {\n      if (replace) {\n        router.replace(href, { scroll });\n      } else {\n        router.push(href, { scroll });\n      }\n    });\n  };\n\n  return (\n    <Link\n      {...props}\n      onClick={handleClick}\n      className={cn(buttonVariants({ variant, size }), 'aria-disabled:pointer-events-none aria-disabled:opacity-50', navigating && '!cursor-wait', className)}\n      replace={replace}\n      aria-disabled={disabled}\n      href={href}\n      scroll={scroll}\n      ref={ref}\n      role=\"button\"\n    />\n  );\n});\n\nNextLink.displayName = 'NextLink';\n"
  },
  {
    "path": "frontend/app/src/components/option-detail.tsx",
    "content": "import { cn } from '@/lib/utils';\nimport type { ReactNode } from 'react';\n\nexport function OptionDetail ({\n  valueClassName,\n  title,\n  value,\n}: {\n  valueClassName?: string\n  title: string\n  value: ReactNode\n}) {\n  return (\n    <div className=\"flex items-center gap-2\">\n      <dt className=\"text-muted-foreground text-xs\">{title}</dt>\n      <dd className={cn('font-medium ml-auto', valueClassName)}>{value}</dd>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/portal-provider.tsx",
    "content": "import { createContext, type ReactNode, useContext } from 'react';\n\nconst PortalContext = createContext<HTMLElement | undefined>(undefined);\n\nexport function PortalProvider ({ children, container }: { children: ReactNode, container: HTMLElement | undefined }) {\n  return (\n    <PortalContext.Provider value={container}>{children}</PortalContext.Provider>\n  );\n}\n\nexport function usePortalContainer () {\n  return useContext(PortalContext);\n}\n"
  },
  {
    "path": "frontend/app/src/components/provider-description.tsx",
    "content": "import type { ProviderOption } from '@/api/providers';\n\nexport function ProviderDescription ({ provider }: { provider: ProviderOption }) {\n  const { provider_url, provider_description } = provider;\n  if (provider_description) {\n    if (provider_url) {\n      return (\n        <>\n          {provider_description}\n          <br />\n          See <a className=\"underline\" href={provider_url} target=\"_blank\">official website</a> for more details.\n        </>\n      );\n    } else {\n      return provider_description;\n    }\n  } else {\n    return null;\n  }\n}"
  },
  {
    "path": "frontend/app/src/components/py-viewer.tsx",
    "content": "'use client';\n\nimport Highlight from 'highlight.js/lib/core';\nimport python from 'highlight.js/lib/languages/python';\nimport { useEffect, useState } from 'react';\nimport './code-theme.scss';\n\nHighlight.registerLanguage('python', python);\n\nexport function PythonViewer ({ value: propValue }: { value: string }) {\n  const [value, setValue] = useState(() => propValue.replaceAll('<', '&lt;'));\n\n  useEffect(() => {\n    setValue(propValue);\n    try {\n      const { value: result } = Highlight.highlight(propValue, { language: 'python' });\n      setValue(result);\n    } catch {\n    }\n  }, [propValue]);\n\n  return (\n    <code>\n      <pre className=\"whitespace-pre-wrap text-xs font-mono\" dangerouslySetInnerHTML={{ __html: value }} />\n    </code>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/components.tsx",
    "content": "import { MessageContextSourceCard } from '@/components/chat/message-content-sources';\nimport { CopyButton } from '@/components/copy-button';\nimport { RemarkContentContext } from '@/components/remark-content/context';\nimport { HoverCard, HoverCardContent, HoverCardTrigger } from '@/components/ui/hover-card';\nimport { cn } from '@/lib/utils';\nimport { HoverCardArrow, HoverCardPortal } from '@radix-ui/react-hover-card';\nimport { cloneElement, useContext, useState } from 'react';\nimport { isElement, isFragment } from 'react-is';\nimport * as jsxRuntime from 'react/jsx-runtime';\nimport { Options as RehypeReactOptions } from 'rehype-react';\n\nfunction dirtyRewrite (some: any, id: string): any {\n  if (some == null) return some;\n  if (typeof some !== 'object') return some;\n\n  if (isElement(some) || isFragment(some)) {\n    const props = some.props as any;\n    return cloneElement(some, {\n      ...props,\n      ...props.id ? { id: `${id}--${props.id}` } : {},\n      children: dirtyRewrite(props.children, id),\n    });\n  }\n\n  if (some instanceof Array) {\n    return some.map(item => dirtyRewrite(item, id));\n  }\n\n  return some;\n}\n\nexport const getRehypeReactOptions = ({ portalContainer }: { portalContainer: HTMLElement | undefined }): RehypeReactOptions => ({\n  Fragment: (jsxRuntime as any).Fragment,\n  jsx: (jsxRuntime as any).jsx,\n  jsxs: (jsxRuntime as any).jsxs,\n  passNode: true,\n  components: {\n    section ({ ...props }) {\n      // eslint-disable-next-line react-hooks/rules-of-hooks\n      const { reactId } = useContext(RemarkContentContext);\n\n      if (!(props as any)['data-footnotes']) return <section {...props} />;\n      return (\n        <section {...props} className={cn(props.className /*, 'sr-only'*/)}>\n          {dirtyRewrite(props.children, reactId)}\n        </section>\n      );\n    },\n    a ({ ...props }) {\n      // eslint-disable-next-line react-hooks/rules-of-hooks\n      const { reactId } = 
useContext(RemarkContentContext);\n\n      // eslint-disable-next-line react-hooks/rules-of-hooks\n      const [link, setLink] = useState<{ title: string, href: string | false }>();\n\n      if (!(props as any)['data-footnote-ref']) return <a {...props} target=\"_blank\" />;\n\n      return (\n        <HoverCard openDelay={0} onOpenChange={open => {\n          if (open) {\n            const id = props.href?.replace(/^#/, '');\n            if (id) {\n              const li = document.getElementById(reactId + '--' + id);\n              if (li) {\n                const a = li.querySelector(`a:first-child:not([data-footnote-backref])`) as HTMLAnchorElement | null;\n                if (a) {\n                  setLink({ title: a.textContent ?? a.href, href: a.href });\n                  return;\n                } else {\n                  const text = li.querySelector('p')?.childNodes?.item(0)?.textContent;\n                  if (text) {\n                    setLink({ title: text, href: false });\n                    return;\n                  }\n                }\n              }\n            }\n            setLink(undefined);\n          }\n        }}>\n          <HoverCardTrigger asChild>\n            <a\n              {...props}\n              className={cn(props.className, 'cursor-default')}\n              href={undefined}\n              onClick={event => {\n                event.preventDefault();\n                event.stopPropagation();\n              }}\n            />\n          </HoverCardTrigger>\n          <HoverCardPortal container={portalContainer}>\n            <HoverCardContent onPointerDownOutside={e => e.preventDefault()} className=\"p-1 w-[200px] overflow-hidden rounded-lg border text-xs\">\n              <HoverCardArrow className=\"fill-border\" />\n              {link\n                ? link.href\n                  ? 
<MessageContextSourceCard title={link?.title} href={link?.href} />\n                  : link.title\n                : null}\n            </HoverCardContent>\n          </HoverCardPortal>\n        </HoverCard>\n      );\n    },\n    pre ({ children, node, ...props }) {\n      // eslint-disable-next-line react-hooks/rules-of-hooks\n      const { rawContent } = useContext(RemarkContentContext);\n\n      let isCodeBlock = false;\n      let range: [number, number] | undefined;\n      const firstChild = node?.children[0];\n      if (firstChild?.type === 'element' && firstChild.tagName === 'code') {\n        isCodeBlock = true;\n        if (firstChild.position && firstChild.position.start.offset && firstChild.position.end.offset) {\n          range = [firstChild.position.start.offset, firstChild.position.end.offset];\n        }\n      }\n\n      return (\n        <pre {...props}>\n          {children}\n          {isCodeBlock && <div className=\"absolute right-1 top-1 transition-opacity opacity-30 hover:opacity-100\" data-role=\"codeblock-addon\">\n            {range && <CopyButton text={() => parseCode(rawContent, range)} />}\n          </div>}\n        </pre>\n      );\n    },\n  },\n});\n\nfunction parseCode (raw: string, range: [number, number]) {\n  // Unindent prefix tabs?\n  return raw.slice(...range)\n    .replace(/^\\s*```[^\\n]*\\n/, '')\n    .replace(/\\n[^\\n]*```$/, '');\n}\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/context.tsx",
    "content": "import { createContext } from 'react';\n\nexport const RemarkContentContext = createContext<{ reactId: string, rawContent: string }>({ reactId: '', rawContent: '' });\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/highlight.ts",
    "content": "import go from 'highlight.js/lib/languages/go';\nimport java from 'highlight.js/lib/languages/java';\nimport javascript from 'highlight.js/lib/languages/javascript';\nimport markdown from 'highlight.js/lib/languages/markdown';\nimport python from 'highlight.js/lib/languages/python';\nimport ruby from 'highlight.js/lib/languages/ruby';\nimport sql from 'highlight.js/lib/languages/sql';\nimport typescript from 'highlight.js/lib/languages/typescript';\nimport php from 'highlight.js/lib/languages/php';\n\nimport { type Options } from 'rehype-highlight';\n\nexport const rehypeHighlightOptions: Options = {\n  languages: { sql, python, javascript, typescript, markdown, go, java, ruby, php },\n  aliases: {\n    sql: ['mysql', 'tidb'],\n    python: ['py'],\n    javascript: ['js'],\n    typescript: ['ts'],\n    markdown: ['md'],\n    go: ['golang'],\n    ruby: ['rb'],\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/index.ts",
    "content": "export { RemarkContent } from './remark-content'"
  },
  {
    "path": "frontend/app/src/components/remark-content/remark-content.stories.tsx",
    "content": "import type { Meta, StoryObj } from '@storybook/react';\nimport { RemarkContent } from './remark-content';\n\nconst meta = {\n  title: 'Components/RemarkContent',\n  component: RemarkContent,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: ['autodocs'],\n  decorators: [\n    (Story) => (\n      <div style={{ minWidth: 400, maxWidth: 800 }}>\n        <Story />\n      </div>\n    ),\n  ],\n  argTypes: {\n    children: {\n      type: 'string',\n    },\n  },\n  args: {},\n} satisfies Meta<typeof RemarkContent>;\n\nexport default meta;\n\ntype Story = StoryObj<typeof meta>\n\nexport const Default: Story = {\n  args: {\n    children: `TiDB is an open-source, distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads[^1]. It is designed to provide a scalable, highly available, and MySQL-compatible database solution for large-scale data management. Here's a comprehensive overview of TiDB:\n\nKey Features:\n\n1. Horizontal Scalability: TiDB's architecture separates computing from storage, allowing you to scale out or scale in the computing or storage capacity independently as needed[^2].\n\n2. High Availability: TiDB ensures financial-grade high availability through its multi-replica architecture and Multi-Raft protocol. Data is stored in multiple replicas, and a transaction is only committed when the majority of replicas have successfully written the data[^2].\n\n3. MySQL Compatibility: TiDB is compatible with the MySQL 5.7 protocol, common features, and syntax. This allows for easy migration of existing MySQL applications to TiDB with minimal code changes[^3].\n\n4. HTAP Capabilities: TiDB supports both Online Transactional Processing (OLTP) and Online Analytical Processing (OLAP) workloads. It achieves this through its two storage engines: TiKV (row-based) for transactional processing and TiFlash (columnar) for analytical processing[^1][^2].\n\n5. 
Cloud-Native Design: TiDB is built for cloud environments, offering flexible scalability, reliability, and security on various cloud platforms. It integrates seamlessly with Kubernetes through TiDB Operator[^2].\n\nArchitecture:\n\nTiDB's architecture consists of several key components:\n\n1. TiDB Server: This is the SQL layer that handles query parsing, optimization, and execution[^4].\n\n2. TiKV: A distributed key-value storage engine that stores the actual data[^4].\n\n3. Placement Driver (PD): The cluster manager that handles metadata management, timestamp allocation, and data placement decisions[^4].\n\n4. TiFlash: A columnar storage engine that replicates data from TiKV in real-time, enabling fast analytical processing[^2].\n\n5. TiDB Binlog: A tool for capturing and replicating data changes in TiDB[^3].\n\nHigh Availability and Scalability:\n\nTiDB achieves high availability and scalability through several mechanisms:\n\n1. Multi-Raft Protocol: This ensures data consistency across replicas and allows for automatic failover when a minority of replicas fail[^2].\n\n2. Separation of Computing and Storage: This architecture allows for independent scaling of compute and storage resources, enabling flexible adaptation to changing workloads[^2].\n\n3. Automatic Sharding: TiDB automatically shards data across TiKV nodes, allowing for seamless horizontal scaling[^4].\n\n4. 
Load Balancing: The Placement Driver continuously monitors the cluster and automatically balances data and workload across nodes[^4].\n\nExample SQL:\n\nHere's an example of how you might create a table and perform some basic operations in TiDB:\n\n\\`\\`\\`sql\n-- Create a new table\nCREATE TABLE users (\n    id INT PRIMARY KEY AUTO_INCREMENT,\n    name VARCHAR(50),\n    email VARCHAR(100),\n    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n);\n\n-- Insert some data\nINSERT INTO users (name, email) VALUES \n('Alice', 'alice@example.com'),\n('Bob', 'bob@example.com');\n\n-- Query the data\nSELECT * FROM users;\n\n-- Update a record\nUPDATE users SET email = 'alice.new@example.com' WHERE name = 'Alice';\n\n-- Delete a record\nDELETE FROM users WHERE name = 'Bob';\n\\`\\`\\`\n\nThis example demonstrates basic SQL operations that you can perform in TiDB, showcasing its MySQL compatibility.\n\n[^1]: [TiDB Introduction | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-faq#what-is-tidb)\n[^2]: [TiDB Key Features | PingCAP Docs](https://docs.pingcap.com/tidb/v7.5/overview#key-features)\n[^3]: [TiDB Architecture | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-architecture)\n[^4]: [TiDB Architecture Components | PingCAP Docs](https://docs.pingcap.com/tidb/v7.1/tidb-architecture#tidb-architecture)\n`,\n  },\n};\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/remark-content.tsx",
    "content": "'use client';\n\nimport { usePortalContainer } from '@/components/portal-provider';\nimport { cn } from '@/lib/utils';\nimport { type ReactNode, useEffect, useId, useMemo, useState } from 'react';\nimport rehypeHighlight from 'rehype-highlight';\nimport rehypeReact from 'rehype-react';\nimport remarkGfm from 'remark-gfm';\nimport remarkParse from 'remark-parse';\nimport remarkRehype from 'remark-rehype';\nimport { unified } from 'unified';\nimport { getRehypeReactOptions } from './components';\nimport { RemarkContentContext } from './context';\nimport { rehypeHighlightOptions } from './highlight';\nimport '@/components/code-theme.scss';\nimport '@/components/remark-content/style.scss';\n\nexport function RemarkContent ({ className, children = '' }: { className?: string, children: string | undefined }) {\n  const portalContainer = usePortalContainer();\n  const reactId = useId();\n\n  const processFn = useMemo(() => {\n    const processor = unified()\n      .use(remarkParse)\n      .use(remarkGfm)\n      .use(remarkRehype)\n      .use(rehypeHighlight, rehypeHighlightOptions)\n      .use(rehypeReact, getRehypeReactOptions({ portalContainer }))\n      .freeze();\n\n    return (text: string) => processor.processSync(text).result;\n  }, [portalContainer]);\n\n  const [value, setValue] = useState<ReactNode>(processFn(children));\n\n  useEffect(() => {\n    if (children) {\n      try {\n        setValue(processFn(children));\n      } catch {\n        setValue(<div className=\"whitespace-pre-wrap\">{children}</div>);\n      }\n    }\n  }, [children]);\n\n  return (\n    <RemarkContentContext.Provider value={{ reactId, rawContent: children }}>\n      <article className={cn('remark-content prose prose-sm prose-zinc dark:prose-invert overflow-x-hidden break-words max-w-[unset]', className)}>\n        {value}\n      </article>\n    </RemarkContentContext.Provider>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/remark-content/style.scss",
    "content": ".remark-content {\n  sup a[data-footnote-ref=true] {\n    @apply bg-primary text-primary-foreground px-1 mx-0.5 rounded-full no-underline;\n  }\n\n  sup a[data-footnote-ref=true] + a[data-footnote-ref=true] {\n    @apply ml-0.5;\n  }\n\n  [data-footnote-backref] {\n    @apply sr-only;\n  }\n\n  pre:has(> code) {\n    position: relative;\n\n    > div[data-role=\"codeblock-addon\"] {\n      @apply text-foreground;\n    }\n  }\n\n  pre:has(> code.hljs) {\n    padding: 0;\n  }\n\n  pre > code.hljs {\n    background-color: #fafafa;\n  }\n\n  pre:has(> code:not(.hljs)) {\n    background: #fafafa;\n\n    > code {\n      color: #24292e;\n    }\n  }\n}\n\n.dark .remark-content {\n  pre > code.hljs {\n    background-color: #18181b;\n  }\n\n\n  pre:has(> code:not(.hljs)) {\n    background: #18181b;\n\n    > code {\n      color: #c9d1d9;\n    }\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/reranker/CreateRerankerForm.tsx",
    "content": "'use client';\n\nimport { type CreateReranker, createReranker, type Reranker, testReranker } from '@/api/rerankers';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { FormRootError } from '@/components/form/root-error';\nimport { onSubmitHelper } from '@/components/form/utils';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { ProviderDescription } from '@/components/provider-description';\nimport { useRerankerProviders } from '@/components/reranker/hooks';\nimport { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';\nimport { Form, formDomEventHandlers, FormSubmit } from '@/components/ui/form.beta';\nimport { useModelProvider } from '@/hooks/use-model-provider';\nimport { zodJsonText } from '@/lib/zod';\nimport { useForm } from '@tanstack/react-form';\nimport { useId, useState } from 'react';\nimport { toast } from 'sonner';\nimport { z } from 'zod';\n\nconst unsetForm = z.object({\n  name: z.string().min(1, 'Must not empty'),\n  provider: z.string().min(1, 'Must not empty'),\n  top_n: z.coerce.number().int().min(1),\n  config: zodJsonText().optional(),\n  is_default: z.boolean().optional(),\n});\n\nconst strCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: z.string().min(1, 'Must not empty'),\n});\n\nconst dictCredentialForm = unsetForm.extend({\n  model: z.string().min(1, 'Must not empty'),\n  credentials: zodJsonText(),\n});\n\nconst field = formFieldLayout<CreateReranker>();\n\nexport function CreateRerankerForm ({ transitioning, onCreated }: { transitioning?: boolean, onCreated?: (reranker: Reranker) => void }) {\n  const id = useId();\n  const { data: options, isLoading, error } = useRerankerProviders();\n  const [submissionError, setSubmissionError] = useState<unknown>();\n\n  
const form = useForm<CreateReranker | Omit<CreateReranker, 'model' | 'credentials'>>({\n    validators: {\n      onSubmit: unsetForm,\n    },\n    onSubmit (props) {\n      const { value } = props;\n      const provider = options?.find(option => option.provider === value.provider);\n\n      const schema = provider\n        ? provider.credentials_type === 'str'\n          ? strCredentialForm\n          : provider.credentials_type === 'dict'\n            ? dictCredentialForm\n            : unsetForm\n        : unsetForm;\n\n      return onSubmitHelper(schema, async (values) => {\n        const { error, success } = await testReranker(values as CreateReranker);\n        if (!success) {\n          throw new Error(error || 'Test Reranker failed');\n        }\n        const reranker = await createReranker(values as CreateReranker);\n        toast.success(`Reranker ${reranker.name} successfully created.`);\n        onCreated?.(reranker);\n      }, setSubmissionError)(props);\n    },\n    defaultValues: {\n      name: '',\n      provider: '',\n      is_default: false,\n      top_n: 5,\n      config: '{}',\n    },\n  });\n\n  const provider = useModelProvider(form, options, 'default_reranker_model');\n\n  return (\n    <>\n      <Form form={form} disabled={transitioning} submissionError={submissionError}>\n        <form id={id} className=\"space-y-4 max-w-screen-sm\" {...formDomEventHandlers(form, transitioning)}>\n          <field.Basic name=\"name\" label=\"Name\">\n            <FormInput />\n          </field.Basic>\n          <field.Basic name=\"provider\" label=\"Provider\" description={provider && <ProviderDescription provider={provider} />}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n          {provider && (\n            <>\n              <field.Basic name=\"model\" label=\"Model\" description={provider.reranker_model_description}>\n                <FormInput />\n              </field.Basic>\n       
       <field.Basic name=\"credentials\" label={provider.credentials_display_name} description={provider.credentials_description}>\n                {provider.credentials_type === 'str'\n                  ? <FormInput placeholder={provider.default_credentials} />\n                  : <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                }\n              </field.Basic>\n              <Accordion type=\"multiple\">\n                <AccordionItem value=\"advanced-settings\">\n                  <AccordionTrigger>\n                    Advanced Settings\n                  </AccordionTrigger>\n                  <AccordionContent className=\"px-4\">\n                    <field.Basic name=\"config\" label=\"Config\" description={provider.config_description}>\n                      <CodeInput language=\"json\" />\n                    </field.Basic>\n                  </AccordionContent>\n                </AccordionItem>\n              </Accordion>\n            </>\n          )}\n          <field.Basic name=\"top_n\" label=\"Top N\">\n            <FormInput type=\"number\" min={1} step={1} />\n          </field.Basic>\n          <FormRootError title=\"Failed to create Reranker\" />\n          <FormSubmit disabled={!options} transitioning={transitioning} form={id}>\n            Create Reranker\n          </FormSubmit>\n        </form>\n      </Form>\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/reranker/RerankerInfo.tsx",
    "content": "'use client';\n\nimport { ModelComponentInfo } from '@/components/model-component-info';\nimport { useReranker } from '@/components/reranker/hooks';\n\nexport function RerankerInfo ({ className, id }: { className?: string, id: number | undefined | null }) {\n  const { reranker, isLoading } = useReranker(id);\n\n  return <ModelComponentInfo\n    className={className}\n    model={reranker}\n    url={reranker => `/reranker-models/${reranker.id}`}\n    isLoading={isLoading}\n    defaultName=\"Default Reranker Model\"\n  />;\n}\n"
  },
  {
    "path": "frontend/app/src/components/reranker/RerankerModelsTable.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { deleteReranker, listRerankers, type Reranker } from '@/api/rerankers';\nimport { actions } from '@/components/cells/actions';\nimport { DataTableRemote } from '@/components/data-table-remote';\nimport { Badge } from '@/components/ui/badge';\nimport { getErrorMessage } from '@/lib/errors';\nimport type { ColumnDef } from '@tanstack/react-table';\nimport { createColumnHelper } from '@tanstack/table-core';\nimport { TrashIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { toast } from 'sonner';\n\nexport default function RerankerModelsTable () {\n  return (\n    <DataTableRemote\n      columns={columns}\n      apiKey=\"api.rerankers.list\"\n      api={listRerankers}\n      idColumn=\"id\"\n    />\n  );\n}\nconst helper = createColumnHelper<Reranker>();\nconst columns: ColumnDef<Reranker, any>[] = [\n  helper.accessor('id', {\n    header: 'ID',\n    cell: ({ row }) => row.original.id,\n  }),\n  helper.accessor('name', {\n    header: 'NAME',\n    cell: ({ row }) => {\n      const { id, name, is_default } = row.original;\n      return (\n        <Link className=\"flex gap-1 items-center underline\" href={`/reranker-models/${id}`}>\n          {is_default && <Badge>default</Badge>}\n          {name}\n        </Link>\n      );\n    },\n  }),\n  helper.display({\n    header: 'PROVIDER / MODEL',\n    cell: ({ row }) => {\n      const { model, provider } = row.original;\n      return (\n        <>\n          <strong>{provider}</strong>/<span>{model}</span>\n        </>\n      );\n    },\n  }),\n  helper.accessor('top_n', {\n    header: 'TOP N',\n  }),\n  helper.display({\n    id: 'Operations',\n    header: 'ACTIONS',\n    cell: actions(row => ([\n      {\n        key: 'set-default',\n        title: 'Set Default',\n        disabled: row.is_default,\n        action: async (context) => {\n          try {\n            await setDefault('reranker-models', row.id);\n            
context.table.reload?.();\n            context.startTransition(() => {\n              context.router.refresh();\n            });\n            context.setDropdownOpen(false);\n            toast.success(`Successfully set default Reranker Model to ${row.name}.`);\n          } catch (e) {\n            toast.error(`Failed to set default Reranker Model to ${row.name}.`, {\n              description: getErrorMessage(e),\n            });\n            throw e;\n          }\n        },\n      },\n      {\n        key: 'delete',\n        action: async ({ table, setDropdownOpen }) => {\n          await deleteReranker(row.id);\n          table.reload?.();\n          setDropdownOpen(false);\n        },\n        title: 'Delete',\n        icon: <TrashIcon className=\"size-3\" />,\n        dangerous: {},\n      },\n    ])),\n  }),\n];\n"
  },
  {
    "path": "frontend/app/src/components/reranker/UpdateRerankerForm.tsx",
    "content": "'use client';\n\nimport { setDefault } from '@/api/commons';\nimport { deleteReranker, type Reranker, updateReranker, type UpdateReranker } from '@/api/rerankers';\nimport { DangerousActionButton } from '@/components/dangerous-action-button';\nimport { ProviderSelect } from '@/components/form/biz';\nimport { FormInput, FormSwitch } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { CodeInput } from '@/components/form/widgets/CodeInput';\nimport { useRerankerProviders } from '@/components/reranker/hooks';\nimport { fieldAccessor, GeneralSettingsField, type GeneralSettingsFieldAccessor, GeneralSettingsForm } from '@/components/settings-form';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { zodJsonText } from '@/lib/zod';\nimport { format } from 'date-fns';\nimport { useRouter } from 'next/navigation';\nimport { useTransition } from 'react';\nimport { z } from 'zod';\n\nexport function UpdateRerankerForm ({ reranker }: { reranker: Reranker }) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n  const { data: options, isLoading, error } = useRerankerProviders();\n\n  const provider = options?.find(option => option.provider === reranker.provider);\n\n  return (\n    <div className=\"max-w-screen-sm space-y-4\">\n      <GeneralSettingsForm<UpdateReranker>\n        data={reranker}\n        readonly={false}\n        loading={transitioning}\n        onUpdate={async (data, path) => {\n          if (path[0] === 'is_default') {\n            await setDefault('reranker-models', reranker.id);\n          } else {\n            const key = path[0] as keyof UpdateReranker;\n            await updateReranker(reranker.id, {\n              [key]: data[key],\n            });\n          }\n          startTransition(() => {\n            router.refresh();\n          });\n        }}\n      >\n        <GeneralSettingsField accessor={idAccessor} 
schema={anySchema} readonly>\n          <field.Basic label=\"ID\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={nameAccessor} schema={nameSchema}>\n          <field.Basic label=\"Name\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={providerAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Provider\" name=\"value\" description={provider?.provider_description}>\n            <ProviderSelect options={options} isLoading={isLoading} error={error} />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={modelAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Model\" name=\"value\" description={provider?.reranker_model_description}>\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        {provider && (\n          provider.credentials_type === 'str'\n            ? 
(\n              <GeneralSettingsField accessor={stringCredentialAccessor} schema={stringCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <FormInput placeholder={provider.default_credentials} />\n                </field.Basic>\n              </GeneralSettingsField>\n            ) : (\n              <GeneralSettingsField accessor={dictCredentialAccessor} schema={dictCredentialSchema}>\n                <field.Basic label=\"Credentials\" name=\"value\" description={provider?.credentials_description}>\n                  <CodeInput language=\"json\" placeholder={JSON.stringify(provider.default_credentials, undefined, 2)} />\n                </field.Basic>\n              </GeneralSettingsField>\n            )\n        )}\n        <GeneralSettingsField accessor={configAccessor} schema={configSchema}>\n          <field.Basic label=\"Config\" name=\"value\" description={provider?.config_description}>\n            <CodeInput language=\"json\" />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={topNAccessor} schema={topNSchema}>\n          <field.Basic label=\"Top N\" name=\"value\">\n            <FormInput type=\"number\" min={1} step={1} />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={isDefaultAccessor} schema={anySchema}>\n          <field.Contained label=\"Is Default\" name=\"value\">\n            <FormSwitch />\n          </field.Contained>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={createdAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Created At\" name=\"value\">\n            <FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n        <GeneralSettingsField accessor={updatedAtAccessor} schema={anySchema} readonly>\n          <field.Basic label=\"Updated At\" name=\"value\">\n            
<FormInput />\n          </field.Basic>\n        </GeneralSettingsField>\n      </GeneralSettingsForm>\n      <DangerousActionButton\n        variant=\"destructive\"\n        disabled={transitioning}\n        action={async () => {\n          await deleteReranker(reranker.id);\n          startTransition(() => {\n            router.push('/reranker-models');\n          });\n        }}\n      >\n        Delete\n      </DangerousActionButton>\n    </div>\n  );\n}\n\nconst field = formFieldLayout<{ value: any | any[] }>();\n\nconst anySchema = z.any();\n\nconst getDatetimeAccessor = (key: KeyOfType<Reranker, Date | undefined | null>): GeneralSettingsFieldAccessor<Reranker, string> => {\n  return {\n    path: [key],\n    get (data) {\n      return format(data[key], 'yyyy-MM-dd HH:mm:ss');\n    },\n    set () {\n      throw new Error(`update ${key} is not supported`);\n    },\n  };\n};\n\nconst nameSchema = z.string();\nconst configSchema = zodJsonText();\nconst topNSchema = z.coerce.number().int().min(1);\n\nconst nameAccessor = fieldAccessor<UpdateReranker, 'name'>('name');\nconst idAccessor = fieldAccessor<Reranker, 'id'>('id');\nconst providerAccessor = fieldAccessor<Reranker, 'provider'>('provider');\nconst modelAccessor = fieldAccessor<Reranker, 'model'>('model');\nconst topNAccessor = fieldAccessor<Reranker, 'top_n'>('top_n');\nconst configAccessor: GeneralSettingsFieldAccessor<UpdateReranker, string> = {\n  path: ['config'],\n  get (data) {\n    return JSON.stringify(data.config, undefined, 2);\n  },\n  set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      config: value,\n    };\n  },\n};\nconst isDefaultAccessor = fieldAccessor<Reranker, 'is_default'>('is_default');\nconst createdAtAccessor = getDatetimeAccessor('created_at');\nconst updatedAtAccessor = getDatetimeAccessor('updated_at');\n\nconst stringCredentialSchema = z.string().optional();\nconst dictCredentialSchema = 
zodJsonText();\n\nconst stringCredentialAccessor = fieldAccessor<UpdateReranker, 'credentials'>('credentials', '');\nconst dictCredentialAccessor: GeneralSettingsFieldAccessor<UpdateReranker, string> = {\n  path: ['credentials'],\n  get (data) {\n    return JSON.stringify(data.credentials, undefined, 2);\n  },\n  set (data, value) {\n    return {\n      ...data,\n      // TODO: This is already converted to object by zodJsonText().\n      credentials: value,\n    };\n  },\n};"
  },
  {
    "path": "frontend/app/src/components/reranker/hooks.ts",
    "content": "import { listRerankerOptions, listRerankers } from '@/api/rerankers';\nimport { listAllHelper } from '@/lib/request';\nimport useSWR from 'swr';\n\nexport function useAllRerankers (flag = true) {\n  return useSWR(flag && 'api.rerankers.list-all', () => listAllHelper(listRerankers, 'id'));\n}\n\nexport function useReranker (id: number | null | undefined) {\n  const { data, mutate, ...rest } = useAllRerankers(id != null);\n\n  return {\n    reranker: data?.find(reranker => reranker.id === id),\n    ...rest,\n  };\n}\n\nexport function useRerankerProviders () {\n  return useSWR('api.rerankers.list-options', listRerankerOptions);\n}"
  },
  {
    "path": "frontend/app/src/components/resource-not-found.tsx",
    "content": "import { NextLink } from '@/components/nextjs/NextLink';\nimport type { ReactNode } from 'react';\n\nexport function ResourceNotFound ({\n  resource,\n  buttonContent = 'Go Back',\n  buttonHref = '/',\n}: {\n  resource: string,\n  buttonContent?: ReactNode,\n  buttonHref?: string,\n}) {\n  return (\n    <div className=\"flex items-center h-full px-4 py-12 sm:px-6 md:px-8 lg:px-12 xl:px-16\">\n      <div className=\"w-full space-y-6 text-center\">\n        <div className=\"space-y-3\">\n          <h2 className=\"text-4xl sm:text-2xl\">\n            <span className=\"tracking-tighter text-muted-foreground\">\n              {'404 '}\n            </span>\n            <span className=\"font-bold\">\n              {resource}\n            </span>\n            <span className=\"tracking-tighter text-muted-foreground\">\n              {' Not Found'}\n            </span>\n          </h2>\n          <p className=\"text-muted-foreground text-sm\">\n            Looks like you&#39;ve ventured into the unknown digital realm.\n          </p>\n        </div>\n        <NextLink href={buttonHref}>\n          {buttonContent}\n        </NextLink>\n      </div>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/row-checkbox.tsx",
    "content": "'use client';\n\nimport { cn } from '@/lib/utils';\nimport * as CheckboxPrimitive from '@radix-ui/react-checkbox';\nimport { Check, Minus } from 'lucide-react';\nimport * as React from 'react';\n\nconst RowCheckbox = React.forwardRef<\n  React.ElementRef<typeof CheckboxPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof CheckboxPrimitive.Root> & { indeterminate?: boolean }\n>(({ className, indeterminate, ...props }, ref) => (\n  <CheckboxPrimitive.Root\n    ref={ref}\n    className={cn(\n      'peer h-4 w-4 shrink-0 rounded-sm border border-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-primary data-[state=checked]:text-primary-foreground',\n      className,\n    )}\n    {...props}\n    checked={props.checked || indeterminate}\n  >\n    <CheckboxPrimitive.Indicator\n      className={cn('flex items-center justify-center text-current')}\n    >\n      {indeterminate ? <Minus className=\"-mt-[1px] h-4 w-4\" /> : <Check className=\"-mt-[1px] h-4 w-4\" />}\n    </CheckboxPrimitive.Indicator>\n  </CheckboxPrimitive.Root>\n));\nRowCheckbox.displayName = 'Row' + CheckboxPrimitive.Root.displayName;\n\nexport { RowCheckbox };\n"
  },
  {
    "path": "frontend/app/src/components/secondary-navigator-list.tsx",
    "content": "'use client';\n\nimport { NextLink } from '@/components/nextjs/NextLink';\nimport { Button } from '@/components/ui/button';\nimport { cn } from '@/lib/utils';\nimport * as TabsPrimitive from '@radix-ui/react-tabs';\nimport { usePathname } from 'next/navigation';\nimport { type ComponentProps, forwardRef, type ReactNode } from 'react';\n\nconst NAV_WIDTH = '14rem';\n\ndeclare module 'react' {\n  export interface CSSProperties {\n    '--secondary-sidebar-width'?: string;\n  }\n}\n\nexport const SecondaryNavigatorLayout = forwardRef<HTMLDivElement, ComponentProps<'div'> & Pick<TabsPrimitive.TabsProps, 'defaultValue'>>(({ className, style, children, defaultValue, ...props }, ref) => {\n  return (\n    <TabsPrimitive.Tabs asChild orientation=\"vertical\" defaultValue={defaultValue}>\n      <div\n        ref={ref}\n        className={cn('flex gap-6 w-full', className)}\n        style={{ '--secondary-sidebar-width': NAV_WIDTH, ...style }}\n        {...props}\n      >\n        {children}\n      </div>\n    </TabsPrimitive.Tabs>\n  );\n});\nSecondaryNavigatorLayout.displayName = 'SecondaryNavigatorLayout';\n\nexport const SecondaryNavigatorList = forwardRef<HTMLDivElement, ComponentProps<'div'>>(({ className, children, ...props }, ref) => {\n  return (\n    <TabsPrimitive.TabsList asChild loop>\n      <div ref={ref} className={cn('space-y-2 w-[--secondary-sidebar-width] flex-shrink-0', className)} {...props}>\n        {children}\n      </div>\n    </TabsPrimitive.TabsList>\n  );\n});\nSecondaryNavigatorList.displayName = 'SecondaryNavigatorList';\n\nexport function SecondaryNavigatorLink ({ pathname, children }: { pathname: string, children: ReactNode }) {\n  const current = usePathname();\n  const active = current === pathname;\n\n  return (\n    <TabsPrimitive.Trigger value={pathname} asChild>\n      <NextLink href={pathname} size=\"sm\" className={cn('flex w-full justify-start', active ? 'font-semibold' : 'font-normal')} variant={active ? 
'secondary' : 'ghost'}>\n        {children}\n      </NextLink>\n    </TabsPrimitive.Trigger>\n  );\n}\n\nexport const SecondaryNavigatorItem = forwardRef<HTMLButtonElement, TabsPrimitive.TabsTriggerProps>(({ value, className, children, ...props }, ref) => {\n  return (\n    <TabsPrimitive.Trigger value={value} asChild>\n      <Button\n        ref={ref}\n        {...props}\n        variant=\"ghost\"\n        className={cn(\n          'flex w-full justify-start',\n          'font-normal data-[state=active]:font-semibold',\n          /* data-[state=active]:secondary */'data-[state=active]:bg-secondary data-[state=active]:text-secondary-foreground data-[state=active]:hover:bg-secondary/80',\n        )}\n      >\n        {children}\n      </Button>\n    </TabsPrimitive.Trigger>\n  );\n});\n\nSecondaryNavigatorItem.displayName = 'SecondaryNavigatorTabsTrigger';\n\nexport const SecondaryNavigatorMain = forwardRef<HTMLDivElement, Omit<TabsPrimitive.TabsContentProps, 'value' | 'forceMount'> & { value?: string, strategy?: 'forceMount' | 'hidden' | 'mount' }>(({ value, strategy = 'mount', className, ...props }, ref) => {\n  const classNames = cn('flex-1 overflow-x-hidden', className);\n  if (value == null) {\n    return <div ref={ref} className={classNames} {...props} />;\n  } else {\n    return (\n      <TabsPrimitive.TabsContent\n        ref={ref}\n        value={value}\n        forceMount={strategy !== 'mount' ? true : undefined}\n        className={cn(classNames, strategy === 'hidden' && 'hidden data-[state=active]:block')}\n        tabIndex={undefined}\n        {...props}\n      />\n    );\n  }\n});\n\nSecondaryNavigatorMain.displayName = 'SecondaryNavigatorContent';\n"
  },
  {
    "path": "frontend/app/src/components/security-setting-provider.tsx",
    "content": "'use client';\n\nimport { ISecuritySettingResult } from '@/core/schema/settings/security';\nimport { createContext } from 'react';\n\nexport const SecuritySettingContext = createContext<ISecuritySettingResult>({});\n\ndeclare var grecaptcha: any;\n\nexport async function withReCaptcha (\n  options: {\n    action: string;\n    siteKey: string;\n    mode?: 'v3' | 'enterprise' | '';\n  },\n  func: (data: { action: string; siteKey: string; token: string }) => void,\n) {\n  const { action, siteKey } = options;\n  // skip if no siteKey\n  if (!siteKey) {\n    return func({ action, siteKey, token: '' });\n  }\n  if (options.mode === 'v3') {\n    grecaptcha.ready(async () => {\n      const token = await grecaptcha.execute(siteKey, { action });\n      func({ action, siteKey, token });\n    });\n  } else if (options.mode === 'enterprise') {\n    grecaptcha.enterprise.ready(async () => {\n      const token = await grecaptcha.enterprise.execute(siteKey, { action });\n      func({ action, siteKey, token });\n    });\n  }\n}"
  },
  {
    "path": "frontend/app/src/components/settings/CustomJsSettings.tsx",
    "content": "'use client';\n\nimport type { AllSettings } from '@/api/site-settings';\nimport { SettingsField } from '@/components/settings/SettingsField';\nimport { StringArrayField } from '@/components/settings/StringArrayField';\nimport { z } from 'zod';\n\nexport function CustomJsSettings ({ schema }: { schema: AllSettings }) {\n  return (\n    <div className=\"space-y-8 max-w-screen-md\">\n      <section className=\"space-y-6\">\n        <SettingsField name=\"custom_js_logo_src\" item={schema.custom_js_logo_src} />\n        <SettingsField name=\"custom_js_button_label\" item={schema.custom_js_button_label} />\n        <SettingsField name=\"custom_js_button_img_src\" item={schema.custom_js_button_img_src} />\n        <SettingsField name=\"custom_js_example_questions\" item={schema.custom_js_example_questions} arrayItemSchema={z.string()}>\n          {props => <StringArrayField {...props} />}\n        </SettingsField>\n      </section>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/settings/IntegrationsSettings.tsx",
    "content": "'use client';\n\nimport type { AllSettings } from '@/api/site-settings';\nimport { SettingsField } from '@/components/settings/SettingsField';\n\nexport function IntegrationsSettings ({ schema, showPostVerificationSettings }: { schema: AllSettings, showPostVerificationSettings: boolean }) {\n  return (\n    <div className=\"space-y-8 max-w-screen-md\">\n      <LangfuseSettings schema={schema} />\n      {showPostVerificationSettings && <ExperimentalPostVerificationSettings schema={schema} />}\n    </div>\n  );\n}\n\nexport function LangfuseSettings ({ schema, hideTitle, disabled, onChanged }: { schema: AllSettings, hideTitle?: boolean, disabled?: boolean, onChanged?: () => void }) {\n  return (\n    <section className=\"space-y-6\">\n      {!hideTitle && <h2 className=\"text-lg font-medium\">Langfuse</h2>}\n      <SettingsField name=\"langfuse_public_key\" item={schema.langfuse_public_key} onChanged={onChanged} disabled={disabled} />\n      <SettingsField name=\"langfuse_secret_key\" item={schema.langfuse_secret_key} onChanged={onChanged} disabled={disabled} />\n      <SettingsField name=\"langfuse_host\" item={schema.langfuse_host} onChanged={onChanged} disabled={disabled} />\n    </section>\n  );\n}\n\nexport function ExperimentalPostVerificationSettings ({ schema, hideTitle, disabled, onChanged }: { schema: AllSettings, hideTitle?: boolean, disabled?: boolean, onChanged?: () => void }) {\n  return (\n    <section className=\"space-y-6\">\n      {!hideTitle && <h2 className=\"text-lg font-medium\">[Experimental] Post verifications</h2>}\n      <SettingsField name=\"enable_post_verifications\" item={schema.enable_post_verifications} onChanged={onChanged} disabled={disabled} />\n      <SettingsField name=\"enable_post_verifications_for_widgets\" item={schema.enable_post_verifications_for_widgets} onChanged={onChanged} disabled={disabled} />\n    </section>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/settings/LinkArrayField.tsx",
    "content": "import { Button } from '@/components/ui/button';\nimport { Input } from '@/components/ui/input';\nimport { PlusIcon } from 'lucide-react';\nimport { forwardRef } from 'react';\nimport type { ControllerRenderProps } from 'react-hook-form';\n\n/**\n * @deprecated\n */\nexport const LinkArrayField = forwardRef<HTMLDivElement, ControllerRenderProps>(({ value, onChange, name, disabled, onBlur }, ref) => {\n  return (\n    <div className=\"space-y-1\" ref={ref}>\n      {(value as { text: string, href: string }[] | null)?.map((item, index) => (\n        <div key={index} className=\"flex gap-2 items-center\">\n          <Input\n            className=\"w-40\"\n            disabled={disabled}\n            value={item.text}\n            onChange={event => {\n              value = [...value];\n              value[index] = {\n                ...value[index],\n                text: event.target.value,\n              };\n              onChange(value);\n            }}\n            onBlur={onBlur}\n          />\n          <Input\n            value={item.href}\n            disabled={disabled}\n            onChange={event => {\n              value = [...value];\n              value[index] = {\n                ...value[index],\n                href: event.target.value,\n              };\n              onChange(value);\n            }}\n            onBlur={onBlur}\n          />\n          <Button\n            variant=\"secondary\"\n            disabled={disabled}\n            type=\"button\"\n            onClick={() => {\n              value = [...value];\n              value.splice(index, 0, { text: '', href: '' });\n              onChange(value);\n            }}\n          >\n            Add\n          </Button>\n          <Button\n            type=\"button\"\n            disabled={disabled}\n            variant=\"ghost\"\n            onClick={() => {\n              value = [...value];\n              value.splice(index, 1);\n              onChange(value);\n            
}}\n          >\n            Delete\n          </Button>\n        </div>\n      ))}\n      {!disabled && <Button\n        className=\"gap-2\"\n        variant=\"ghost\"\n        type=\"button\"\n        onClick={() => onChange([...value, { text: '', href: '' }])}\n      >\n        <PlusIcon className=\"size-4\" />\n        Add\n      </Button>}\n    </div>\n  );\n});\n\nLinkArrayField.displayName = 'LinkArrayField';\n"
  },
  {
    "path": "frontend/app/src/components/settings/SettingsField.tsx",
    "content": "import { type SettingItem, updateSiteSetting } from '@/api/site-settings';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Button } from '@/components/ui/button';\nimport { Form, FormControl, FormDescription, FormField, FormItem, FormLabel, FormMessage } from '@/components/ui/form';\nimport { Input } from '@/components/ui/input';\nimport { Switch } from '@/components/ui/switch';\nimport { getErrorMessage } from '@/lib/errors';\nimport { cn } from '@/lib/utils';\nimport { zodResolver } from '@hookform/resolvers/zod';\nimport { capitalCase } from 'change-case-all';\nimport { deepEqual } from 'fast-equals';\nimport { CheckIcon, Loader2Icon, TriangleAlertIcon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\nimport { cloneElement, type ReactElement, type ReactNode, useCallback, useDeferredValue, useMemo, useTransition } from 'react';\nimport { type ControllerRenderProps, useForm, useFormState, useWatch } from 'react-hook-form';\nimport { toast } from 'sonner';\nimport { z, type ZodType } from 'zod';\n\nexport interface SettingsFieldProps {\n  name: string;\n  item: SettingItem;\n  arrayItemSchema?: ZodType;\n  objectSchema?: ZodType;\n  onChanged?: () => void;\n  disabled?: boolean;\n  children?: (props: ControllerRenderProps) => ReactElement<any>;\n}\n\n/**\n * @deprecated\n */\nexport function SettingsField ({ name, item, arrayItemSchema, objectSchema, onChanged, disabled, children }: SettingsFieldProps) {\n  const router = useRouter();\n  const [transitioning, startTransition] = useTransition();\n\n  if (!item) {\n    return (\n      <Alert variant=\"warning\">\n        <TriangleAlertIcon />\n        <AlertTitle>Failed to load <em>{name}</em></AlertTitle>\n        <AlertDescription>Frontend and backend services may be misconfigured, please check your deployments.</AlertDescription>\n      </Alert>\n    );\n  }\n\n  if (item.data_type === 'list') {\n    if (!arrayItemSchema) {\n      throw 
new Error(`list item requires array item schema`);\n    }\n  }\n\n  if (item.data_type === 'dict') {\n    if (!objectSchema) {\n      throw new Error(`dict item requires object schema`);\n    }\n  }\n\n  // eslint-disable-next-line react-hooks/rules-of-hooks\n  const schema = useMemo(() => {\n    let schema: ZodType;\n    switch (item.data_type) {\n      case 'str':\n        schema = z.string();\n        break;\n      case 'bool':\n        schema = z.coerce.boolean();\n        break;\n      case 'int':\n        schema = z.coerce.number().int();\n        break;\n      case 'float':\n        schema = z.coerce.number();\n        break;\n      case 'list':\n        if (!arrayItemSchema) {\n          throw new Error(`list item requires array item schema`);\n        }\n        schema = arrayItemSchema.array();\n        break;\n      case 'dict':\n        if (!objectSchema) {\n          throw new Error(`dict item requires object schema`);\n        }\n        schema = objectSchema;\n        break;\n      default:\n        throw new Error(`unknown data type`);\n    }\n    return z.object({ [item.name]: schema });\n  }, [item.name, item.data_type, arrayItemSchema, objectSchema]);\n\n  // eslint-disable-next-line react-hooks/rules-of-hooks\n  const form = useForm({\n    resolver: zodResolver(schema),\n    disabled: disabled || transitioning,\n    values: {\n      [item.name]: item.value,\n    },\n    defaultValues: {\n      [item.name]: item.default,\n    },\n  });\n\n  // eslint-disable-next-line react-hooks/rules-of-hooks\n  const Control = useCallback(({ field: { ...props } }: { field: ControllerRenderProps }) => {\n    let el: ReactNode;\n\n    if (children) {\n      el = cloneElement(children(props), props);\n    } else {\n      switch (item.data_type) {\n        case 'int':\n          el = <Input type=\"number\" step={1} placeholder={String(item.default)} {...props} />;\n          break;\n        case 'float':\n          el = <Input type=\"number\" {...props} 
placeholder={String(item.default)} />;\n          break;\n        case 'str':\n          el = <Input {...props} placeholder={item.default} />;\n          break;\n        case 'bool':\n          el = <Switch className=\"block\" {...props} onChange={undefined} checked={props.value} onCheckedChange={props.onChange} />;\n          break;\n        case 'dict':\n        case 'list':\n          throw new Error(`data type ${item.data_type} requires custom children`);\n      }\n    }\n\n    return (\n      <FormControl>\n        {el}\n      </FormControl>\n    );\n  }, [item.default, item.data_type, children]);\n\n  const handleSubmit = form.handleSubmit(async data => {\n    try {\n      await updateSiteSetting(name, data[item.name]);\n      form.reset({ [item.name]: data[item.name] });\n      startTransition(() => {\n        router.refresh();\n      });\n      onChanged?.();\n      toast.success(`Changes successfully saved.`);\n    } catch (e) {\n      form.setError(item.name, { type: 'value', message: getErrorMessage(e) });\n      return Promise.reject(e);\n    }\n  });\n\n  return (\n    <Form {...form}>\n      <form\n        id={`setting_form_${name}`}\n        className=\"space-y-2\"\n        onSubmit={handleSubmit}\n        onReset={(e) => {\n          form.setValue(item.name, item.default, { shouldTouch: true, shouldDirty: true });\n          // void handleSubmit(e);\n        }}\n      >\n        <FormField\n          name={item.name}\n          disabled={form.formState.isSubmitting}\n          render={({ field }) => (\n            <FormItem>\n              <FormLabel>{capitalCase(item.name)}</FormLabel>\n              <Control field={field} />\n              <FormDescription>{item.description}</FormDescription>\n              <FormMessage />\n            </FormItem>\n          )}\n        />\n        <Operations name={item.name} defaultValue={item.default} refreshing={transitioning} />\n      </form>\n    </Form>\n  );\n}\n\nfunction Operations ({ refreshing, name, 
defaultValue }: { refreshing: boolean, name: string, defaultValue: any }) {\n  const currentValue = useWatch({\n    name,\n  });\n  const { isDirty, isSubmitting, disabled, isSubmitted } = useFormState();\n  const notDefault = !deepEqual(currentValue, defaultValue);\n\n  const deferredIsDirty = useDeferredValue(isDirty);\n  const deferredIsSubmitting = useDeferredValue(isSubmitting);\n\n  const successAndWaitRefreshing = !isSubmitting && (deferredIsSubmitting && refreshing);\n\n  return (\n    <div className=\"flex gap-2 items-center\">\n      {(isDirty || deferredIsDirty) && <Button className={cn('gap-2 items-center', successAndWaitRefreshing && 'bg-success')} type=\"submit\" disabled={isSubmitting || successAndWaitRefreshing || disabled}>\n        {(isSubmitting) && <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />}\n        {successAndWaitRefreshing && <CheckIcon className=\"size-4\" />}\n        {isSubmitting ? 'Saving...' : refreshing ? 'Saved' : 'Save'}\n      </Button>}\n      {(isDirty || notDefault) && <Button type=\"reset\" variant=\"secondary\" disabled={isSubmitting || !notDefault || disabled}>Reset</Button>}\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/settings/StringArrayField.tsx",
    "content": "import { Button } from '@/components/ui/button';\nimport { Input } from '@/components/ui/input';\nimport { PlusIcon } from 'lucide-react';\nimport { forwardRef } from 'react';\nimport type { ControllerRenderProps } from 'react-hook-form';\n\n/**\n * @deprecated\n */\nexport const StringArrayField = forwardRef<HTMLDivElement, ControllerRenderProps>(({ value, onChange, name, disabled, onBlur }, ref) => {\n  return (\n    <div className=\"space-y-1\" ref={ref}>\n      {(value as string[] | null)?.map((item, index) => (\n        <div key={index} className=\"flex gap-2 items-center\">\n          <Input\n            value={item}\n            disabled={disabled}\n            onChange={event => {\n              value = [...value];\n              value[index] = event.target.value;\n              onChange(value);\n            }}\n            onBlur={onBlur}\n          />\n          <Button\n            variant=\"secondary\"\n            disabled={disabled}\n            type=\"button\"\n            onClick={() => {\n              value = [...value];\n              value.splice(index, 0, '');\n              onChange(value);\n            }}\n          >\n            Add\n          </Button>\n          <Button\n            type=\"button\"\n            disabled={disabled}\n            variant=\"ghost\"\n            onClick={() => {\n              value = [...value];\n              value.splice(index, 1);\n              onChange(value);\n            }}\n          >\n            Delete\n          </Button>\n        </div>\n      ))}\n      {!disabled && <Button\n        className=\"gap-2\"\n        variant=\"ghost\"\n        type=\"button\"\n        onClick={() => onChange([...value, ''])}\n      >\n        <PlusIcon className=\"size-4\" />\n        Add\n      </Button>}\n    </div>\n  );\n});\n\nStringArrayField.displayName = 'StringArrayField';\n"
  },
  {
    "path": "frontend/app/src/components/settings/WebsiteSettings.tsx",
    "content": "'use client';\n\nimport type { AllSettings } from '@/api/site-settings';\nimport { LinkArrayField } from '@/components/settings/LinkArrayField';\nimport { SettingsField } from '@/components/settings/SettingsField';\nimport { StringArrayField } from '@/components/settings/StringArrayField';\nimport { Separator } from '@/components/ui/separator';\nimport { z } from 'zod';\n\nexport function WebsiteSettings ({ schema }: { schema: AllSettings }) {\n  return (\n    <div className=\"space-y-8 max-w-screen-md\">\n      <section className=\"space-y-6\">\n        <h2 className=\"text-lg font-medium\">Basic Settings</h2>\n        <SettingsField name=\"title\" item={schema.title} />\n        <SettingsField name=\"description\" item={schema.description} />\n        <SettingsField name=\"logo_in_dark_mode\" item={schema.logo_in_dark_mode} />\n        <SettingsField name=\"logo_in_light_mode\" item={schema.logo_in_light_mode} />\n      </section>\n      <Separator />\n      <section className=\"space-y-6\">\n        <h2 className=\"text-lg font-medium\">Homepage Settings</h2>\n        <SettingsField name=\"homepage_title\" item={schema.homepage_title} />\n        <SettingsField name=\"homepage_example_questions\" item={schema.homepage_example_questions} arrayItemSchema={z.string()}>\n          {props => <StringArrayField {...props} />}\n        </SettingsField>\n        <SettingsField name=\"homepage_footer_links\" item={schema.homepage_footer_links} arrayItemSchema={z.object({ text: z.string(), href: z.string() })}>\n          {props => <LinkArrayField {...props} />}\n        </SettingsField>\n      </section>\n      <Separator />\n      <section className=\"space-y-6\">\n        <h2 className=\"text-lg font-medium\">Social links</h2>\n        <SettingsField name=\"social_github\" item={schema.social_github} />\n        <SettingsField name=\"social_twitter\" item={schema.social_twitter} />\n        <SettingsField name=\"social_discord\" 
item={schema.social_discord} />\n      </section>\n      <Separator />\n      <section className=\"space-y-6\">\n        <h2 className=\"text-lg font-medium\">Analytics</h2>\n        <SettingsField name=\"ga_id\" item={schema.ga_id} />\n      </section>\n      <Separator />\n      <section className=\"space-y-6\">\n        <h2 className=\"text-lg font-medium\">Uploads</h2>\n        <SettingsField name=\"max_upload_file_size\" item={schema.max_upload_file_size} />\n      </section>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/settings/WidgetSnippet.tsx",
    "content": "'use client';\n\nimport { HtmlViewer } from '@/components/html-viewer';\nimport { Button } from '@/components/ui/button';\nimport { useEffect, useState } from 'react';\n\nconst base = process.env.SITE_URL || '';\n\nfunction parseOrigin (url: string) {\n  try {\n    return new URL(url).origin;\n  } catch {\n    return '';\n  }\n}\n\nexport function WidgetSnippet () {\n  const [copied, setCopied] = useState(false);\n\n  const [url, setUrl] = useState(base + '/widget.js');\n\n  const origin = parseOrigin(url);\n  const script = `<script src=\"${url}\" ${origin && `data-api-base=\"${origin}\"`} async></script>`;\n\n  useEffect(() => {\n    if (!process.env.SITE_URL) {\n      setUrl(location.origin + '/widget.js');\n    }\n  }, []);\n\n  return (\n    <div className=\"rounded-lg p-2 border mb-4 relative bg-foreground/5\">\n      <div className=\"text-xs font-mono opacity-30 select-none\">&lt;<span className=\"hljs-name\">html</span>&gt;</div>\n      <div className=\"text-xs font-mono opacity-30 select-none\">&lt;<span className=\"hljs-name\">body</span>&gt;</div>\n      <div className=\"text-xs font-mono opacity-30 select-none whitespace-pre\"> ...</div>\n      <HtmlViewer value={`  ${script}`} />\n      <div className=\"text-xs font-mono opacity-30 select-none\">&lt;/<span className=\"hljs-name\">body</span>&gt;</div>\n      <div className=\"text-xs font-mono opacity-30 select-none\">&lt;/<span className=\"hljs-name\">html</span>&gt;</div>\n      <Button variant=\"secondary\" size=\"sm\" className=\"absolute top-0.5 right-0.5 select-none\" onClick={() => {\n        navigator.clipboard.writeText(script);\n        setCopied(true);\n      }}>\n        {copied ? 'Copied' : 'Copy'}\n      </Button>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/settings-form/GeneralSettingsField.tsx",
    "content": "import { FormRootError } from '@/components/form/root-error';\nimport { useGeneralSettingsFormContext } from '@/components/settings-form/context';\nimport { Button } from '@/components/ui/button';\nimport { Form, formDomEventHandlers } from '@/components/ui/form.beta';\nimport { getErrorMessage } from '@/lib/errors';\nimport { useForm } from '@tanstack/react-form';\nimport { Loader2Icon } from 'lucide-react';\nimport type { ReactNode } from 'react';\nimport { z, type ZodType } from 'zod';\n\nexport interface GeneralSettingsFieldAccessor<Data, FieldData> {\n  path: [keyof Data, ...(string | number | symbol)[]]\n  get: (data: Readonly<Data>) => FieldData,\n  set: (data: Readonly<Data>, value: FieldData) => Data,\n}\n\nexport function fieldAccessor<Data, Key extends keyof Data> (key: Key, defaultValue?: Data[Key]): GeneralSettingsFieldAccessor<Data, Data[Key]> {\n  return {\n    path: [key],\n    get: (data) => data[key] ?? defaultValue as Data[Key],\n    set: (data, value) => {\n      return {\n        ...data,\n        [key]: value,\n      };\n    },\n  };\n}\n\nexport function GeneralSettingsField<Data, FieldData> ({\n  accessor, schema, children, readonly: fieldReadonly = false,\n}: {\n  accessor: GeneralSettingsFieldAccessor<Data, FieldData>,\n  schema: z.ZodType<FieldData, any, any>,\n  readonly?: boolean,\n  children: ReactNode,\n}) {\n  const { data, disabled, readonly, onUpdateField } = useGeneralSettingsFormContext<Data>();\n  const form = useForm<{ value: FieldData }>({\n    validators: {\n      onChange: z.object({\n        value: schema,\n      }).strict() as ZodType<{ value: FieldData }, any, any>,\n      onSubmit: z.object({\n        value: schema,\n      }).strict() as ZodType<{ value: FieldData }, any, any>,\n    },\n    defaultValues: {\n      value: accessor.get(data),\n    },\n    onSubmit: async ({ value: { value }, formApi }) => {\n      try {\n        await onUpdateField(schema.parse(value), accessor);\n        formApi.reset({\n  
        value,\n        });\n      } catch (e) {\n        formApi.setErrorMap({\n          onChange: getErrorMessage(e),\n        });\n      }\n    },\n  });\n\n\n  return (\n    <Form<{ value: FieldData }, undefined> disabled={disabled || readonly || fieldReadonly} form={form}>\n      <form className=\"space-y-6\" {...formDomEventHandlers(form)}>\n        {children}\n        <FormRootError />\n        {!readonly && (\n          <form.Subscribe selector={state => [state.isDirty, state.isSubmitting] as const}>\n            {([isDirty, isSubmitting]) => (isDirty || isSubmitting) && (\n              <div className=\"flex items-center gap-2\">\n                <Button type=\"submit\" disabled={disabled || isSubmitting || readonly || fieldReadonly}>\n                  {isSubmitting && <Loader2Icon className=\"animate-spin repeat-infinite\" />}\n                  {isSubmitting ? 'Saving' : 'Save'}\n                </Button>\n                <Button type=\"reset\" variant=\"secondary\" disabled={disabled || isSubmitting || readonly || fieldReadonly}>Reset</Button>\n              </div>\n            )}\n          </form.Subscribe>\n        )}\n      </form>\n    </Form>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/settings-form/GeneralSettingsForm.tsx",
    "content": "import { GeneralSettingsFormContextProvider, type OnUpdateField } from '@/components/settings-form/context';\nimport { useLatestRef } from '@/components/use-latest-ref';\nimport { type ReactNode, useOptimistic, useTransition } from 'react';\n\nexport function GeneralSettingsForm<Data> ({ data, loading, readonly, onUpdate, children }: {\n  data: Data,\n  readonly: boolean,\n  loading: boolean,\n  onUpdate: (data: Readonly<Data>, path: (string | number | symbol)[]) => Promise<void>,\n  children: ReactNode,\n}) {\n  const [updating, startTransition] = useTransition();\n  const dataRef = useLatestRef(data);\n\n  const [optimisticData, setOptimisticData] = useOptimistic(data);\n\n  const onUpdateField: OnUpdateField<Data> = async (value, accessor) => {\n    const data = accessor.set(dataRef.current, value);\n\n    const updatePromise = onUpdate(data, accessor.path);\n    startTransition(async () => {\n      setOptimisticData(data);\n      await updatePromise;\n    });\n\n    await updatePromise;\n  };\n\n  return (\n    <GeneralSettingsFormContextProvider value={{ data: optimisticData, readonly, disabled: loading || updating, onUpdateField }}>\n      {children}\n    </GeneralSettingsFormContextProvider>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/settings-form/accessor-helper.ts",
    "content": "import type { GeneralSettingsFieldAccessor } from '@/components/settings-form/GeneralSettingsField';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport { format, parse } from 'date-fns';\n\ninterface AccessorHelper<Row> {\n  field<K extends string & keyof Row> (key: K, defaultValue?: Row[K]): GeneralSettingsFieldAccessor<Row, Row[K]>;\n\n  dateField<K extends string & KeyOfType<Row, Date>> (key: K): GeneralSettingsFieldAccessor<Row, string>;\n\n  jsonTextField<K extends string & keyof Row> (key: K): GeneralSettingsFieldAccessor<Row, string>;\n\n  nestedField<\n    K0 extends string & KeyOfType<Row, Record<string, any>>,\n    K1 extends string & keyof Row[K0]\n  > (k0: K0, k1: K1): GeneralSettingsFieldAccessor<Row, Row[K0][K1]>;\n}\n\nexport function createAccessorHelper<Row> (): AccessorHelper<Row> {\n  return {\n    field<K extends keyof Row> (key: K, defaultValue: Row[K]): GeneralSettingsFieldAccessor<Row, Row[K]> {\n      return {\n        path: [key],\n        get (data) {\n          return data[key] ?? 
defaultValue;\n        },\n        set (data, value) {\n          return {\n            ...data,\n            [key]: value,\n          };\n        },\n      };\n    },\n    dateField<K extends string & KeyOfType<Row, Date>> (key: K): GeneralSettingsFieldAccessor<Row, string> {\n      return {\n        path: [key],\n        get (data) {\n          const date = data[key] as Date | undefined | null;\n          if (!date) {\n            return '--';\n          }\n          return format(date, 'yyyy-MM-dd HH:mm:ss');\n        },\n        set (data, value) {\n          const date = parse(value, 'yyyy-MM-dd HH:mm:ss', new Date());\n          return {\n            ...data,\n            [key]: date,\n          };\n        },\n      };\n    },\n    jsonTextField<K extends string & KeyOfType<Row, Date>> (key: K): GeneralSettingsFieldAccessor<Row, string> {\n      return {\n        path: [key],\n        get (data) {\n          return JSON.stringify(data[key], undefined, 2);\n        },\n        set (data, value) {\n          return {\n            ...data,\n            [key]: JSON.parse(value),\n          };\n        },\n      };\n    },\n    nestedField<K0 extends string & KeyOfType<Row, Record<string, any>>, K1 extends string & keyof Row[K0]> (k0: K0, k1: K1): GeneralSettingsFieldAccessor<Row, Row[K0][K1]> {\n      return {\n        path: [k0, k1],\n        get (row) {\n          return row[k0]?.[k1] as any;\n        },\n        set (row, value) {\n          return {\n            ...row,\n            [k0]: {\n              ...row[k0],\n              [k1]: value,\n            },\n          };\n        },\n      };\n    },\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/settings-form/context.tsx",
    "content": "import type { GeneralSettingsFieldAccessor } from '@/components/settings-form/GeneralSettingsField';\nimport { createContext, type ReactNode, useContext } from 'react';\n\nexport type OnUpdateField<Data> = <T> (value: T, accessor: GeneralSettingsFieldAccessor<Data, T>) => void | Promise<void>;\n\nexport interface GeneralSettingsFormContextValues<Data> {\n  data: Data;\n  disabled: boolean;\n  readonly: boolean;\n  onUpdateField: OnUpdateField<Data>;\n}\n\nconst GeneralSettingsFormContext = createContext<GeneralSettingsFormContextValues<any>>(null as any);\n\nexport function useGeneralSettingsFormContext<Data> () {\n  return useContext(GeneralSettingsFormContext);\n}\n\nexport function GeneralSettingsFormContextProvider<Data> ({ value, children }: { value: GeneralSettingsFormContextValues<Data>, children: ReactNode }) {\n  return (\n    <GeneralSettingsFormContext value={value}>\n      {children}\n    </GeneralSettingsFormContext>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/settings-form/index.ts",
    "content": "export { GeneralSettingsForm } from './GeneralSettingsForm';\nexport { fieldAccessor, GeneralSettingsField, type GeneralSettingsFieldAccessor } from './GeneralSettingsField';\nexport { shallowPick } from './utils';\nexport { createAccessorHelper } from './accessor-helper';\n"
  },
  {
    "path": "frontend/app/src/components/settings-form/utils.ts",
    "content": "export function shallowPick<Data, Key extends keyof Data> (data: Data, path: [Key, ...(string | number | symbol)[]]) {\n  const key = path[0];\n  return { [key]: data[key] } as Pick<Data, Key>;\n}"
  },
  {
    "path": "frontend/app/src/components/signin.tsx",
    "content": "'use client';\n\nimport { login } from '@/api/auth';\nimport { FormInput } from '@/components/form/control-widget';\nimport { formFieldLayout } from '@/components/form/field-layout';\nimport { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Button } from '@/components/ui/button';\nimport { Form, formDomEventHandlers } from '@/components/ui/form.beta';\nimport { getErrorMessage } from '@/lib/errors';\nimport { useForm } from '@tanstack/react-form';\nimport { Loader2Icon } from 'lucide-react';\nimport { useRouter } from 'next/navigation';\nimport { useState, useTransition } from 'react';\n\nconst field = formFieldLayout<{\n  username: string\n  password: string\n}>();\n\nexport function Signin ({ noRedirect = false, callbackUrl }: { noRedirect?: boolean, callbackUrl?: string }) {\n  const [transitioning, startTransition] = useTransition();\n  const router = useRouter();\n  const [error, setError] = useState<string>();\n  const form = useForm<{ username: string; password: string }>({\n    defaultValues: {\n      username: '',\n      password: '',\n    },\n    onSubmit: async ({ value }) => {\n      setError(undefined);\n      try {\n        await login(value);\n        startTransition(() => {\n          if (!noRedirect) {\n            router.replace(refineCallbackUrl(callbackUrl));\n          }\n          router.refresh();\n        });\n      } catch (error) {\n        setError(getErrorMessage(error));\n      }\n    },\n  });\n\n  const loading = form.state.isSubmitting || transitioning;\n\n  return (\n    <>\n      {error && (\n        <Alert variant=\"destructive\">\n          <AlertTitle>\n            Failed to login\n          </AlertTitle>\n          <AlertDescription>\n            Could not login with provided credentials.\n          </AlertDescription>\n        </Alert>\n      )}\n      <Form form={form} disabled={transitioning}>\n        <form className=\"space-y-2\" {...formDomEventHandlers(form, 
transitioning)}>\n          <field.Basic name=\"username\" label=\"Username\">\n            <FormInput placeholder=\"x@example.com\" />\n          </field.Basic>\n          <field.Basic name=\"password\" label=\"Password\">\n            <FormInput type=\"password\" />\n          </field.Basic>\n          <Button className=\"!mt-4 w-full\" type=\"submit\" disabled={loading}>\n            {loading && <Loader2Icon className=\"w-4 h-4 mr-2 animate-spin repeat-infinite\" />}\n            {transitioning ? 'Redirecting...' : loading ? 'Logging in...' : 'Login'}\n          </Button>\n        </form>\n      </Form>\n    </>\n  );\n}\n\nfunction refineCallbackUrl (url: string | undefined) {\n  if (!url) {\n    return `${location.origin}`;\n  }\n  if (/auth\\/login/.test(url)) {\n    return `${location.origin}`;\n  } else {\n    return url;\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/site-header-actions.tsx",
    "content": "import DiscordSvg from '@/components/icons/discord.svg';\nimport GithubSvg from '@/components/icons/github.svg';\nimport TwitterXSvg from '@/components/icons/twitter-x.svg';\nimport { ThemeToggle } from '@/components/theme-toggle';\nimport { Button } from '@/components/ui/button';\nimport clsx from 'clsx';\nimport NextLink from 'next/link';\n\nexport type SiteSocialsType = {\n  github?: string | null;\n  twitter?: string | null;\n  discord?: string | null;\n};\n\nexport function SiteHeaderActions (props: {\n  className?: string;\n  social?: SiteSocialsType;\n}) {\n  const { className, social = {} } = props;\n  return (\n    <div className={clsx('h-header w-full gap-0.5 items-center', className)}>\n      <ThemeToggle />\n      {social?.github && (\n        <NextLink href={social.github} target=\"_blank\" className=\"ml-auto\">\n          <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full\">\n            <GithubSvg />\n          </Button>\n        </NextLink>\n      )}\n\n      {social.twitter && (\n        <NextLink href={social.twitter} target=\"_blank\" className=\"\">\n          <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full\">\n            <TwitterXSvg />\n          </Button>\n        </NextLink>\n      )}\n\n      {social.discord && (\n        <NextLink href={social.discord} target=\"_blank\" className=\"\">\n          <Button size=\"icon\" variant=\"ghost\" className=\"rounded-full\">\n            <DiscordSvg />\n          </Button>\n        </NextLink>\n      )}\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/site-header.tsx",
    "content": "import type { PublicWebsiteSettings } from '@/api/site-settings';\nimport { Branding } from '@/components/branding';\nimport { SiteHeaderActions } from '@/components/site-header-actions';\nimport { SidebarTrigger } from '@/components/ui/sidebar';\n\nexport function SiteHeaderSmallScreen ({ setting }: { setting: PublicWebsiteSettings }) {\n  return (\n    <header className=\"md:hidden h-header px-2 sticky top-0 bg-background border-b z-10 flex gap-2 items-center\">\n      <Branding setting={setting} />\n    </header>\n  );\n}\n\nexport function SiteHeaderLargeScreen ({ setting }: { setting: PublicWebsiteSettings }) {\n  return (\n    <div className=\"fixed top-0 right-2 md:top-4 md:right-4 z-10\">\n      <div className=\"flex gap-2 items-center\">\n        <SidebarTrigger className=\"md:hidden\" />\n        <SiteHeaderActions\n          className=\"flex h-fit\"\n          social={{\n            twitter: setting.social_twitter,\n            github: setting.social_github,\n            discord: setting.social_discord,\n          }}\n        />\n      </div>\n    </div>\n  );\n}"
  },
  {
    "path": "frontend/app/src/components/site-nav.tsx",
    "content": "'use client';\n\nimport { AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent, AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle, AlertDialogTrigger } from '@/components/ui/alert-dialog';\nimport { Button, type ButtonProps } from '@/components/ui/button';\nimport { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';\nimport { SidebarGroup, SidebarGroupContent, SidebarGroupLabel, SidebarMenu, SidebarMenuBadge, SidebarMenuButton, SidebarMenuItem, SidebarMenuSub, SidebarMenuSubButton, SidebarMenuSubItem } from '@/components/ui/sidebar';\nimport { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';\nimport { ChevronDownIcon, TrashIcon } from 'lucide-react';\nimport Link from 'next/link';\nimport { usePathname } from 'next/navigation';\n\nimport { type ComponentType, Fragment, type ReactElement, type ReactNode } from 'react';\n\nexport interface NavGroup {\n  title?: ReactNode;\n  items: NavItem[];\n  sectionProps?: {\n    className?: string;\n  };\n}\n\nexport interface NavBaseItem {\n  icon?: ComponentType<{ className?: string }>;\n  title: ReactNode;\n  details?: ReactNode;\n  className?: string;\n  disabled?: ReactNode | boolean;\n}\n\nexport interface NavLinkItem extends NavBaseItem {\n  parent?: undefined;\n  custom?: undefined;\n  href: string;\n  exact?: boolean;\n  variant?: ButtonProps['variant'] | ((active: boolean) => ButtonProps['variant']);\n  onDelete?: () => void;\n  deleteResourceName?: string;\n}\n\nexport interface NavParentItem extends NavBaseItem {\n  parent: true;\n  key: string;\n  custom?: undefined;\n  variant?: ButtonProps['variant'] | ((active: boolean) => ButtonProps['variant']);\n  children: (NavLinkItem | CustomItem)[];\n}\n\nexport interface CustomItem {\n  parent?: undefined;\n  custom: true;\n  key: string;\n  children: ReactNode;\n}\n\nexport type NavItem = NavLinkItem | NavParentItem | 
CustomItem;\n\nconst isCustomItem = (item: NavItem): item is CustomItem => !!item.custom;\nconst isParentItem = (item: NavItem): item is NavParentItem => !!item.parent;\nconst isNavLinkItem = (item: NavItem): item is NavLinkItem => 'href' in item && !('children' in item);\n\nexport interface SiteNavProps {\n  groups: NavGroup[];\n}\n\nexport function SiteNav ({ groups }: SiteNavProps) {\n  const pathname = usePathname() ?? '';\n\n  return (\n    <TooltipProvider>\n      {groups.map((group, index) => (\n        <Fragment key={index}>\n          <SiteNavGroup group={group} current={pathname} />\n        </Fragment>\n      ))}\n    </TooltipProvider>\n  );\n}\n\nfunction SiteNavGroup ({ group, current }: { group: NavGroup, current: string }) {\n  const { sectionProps: { className: sectionClassName, ...restSectionProps } = {} } = group;\n  return (\n    <SidebarGroup>\n      {group.title && <SidebarGroupLabel>{group.title}</SidebarGroupLabel>}\n      <SidebarGroupContent>\n        <SidebarMenu>\n          {renderItems(group.items, current)}\n        </SidebarMenu>\n      </SidebarGroupContent>\n    </SidebarGroup>\n  );\n}\n\nfunction isActive (current: string, item: NavLinkItem) {\n  return current === item.href || (!item.exact && current.startsWith(item.href));\n}\n\nconst renderItems = (items: NavItem[], current: string) => {\n  return (\n    <>\n      {items.map(item => (\n        isCustomItem(item)\n          ? <Fragment key={item.key}>{item.children}</Fragment>\n          : isParentItem(item)\n            ? 
<SiteParentItem key={item.key} current={current} active={!!item.children.find(child => isNavLinkItem(child) && isActive(current, child))} item={item} />\n            : <SiteNavLinkItem key={item.href} item={item} active={isActive(current, item)} />\n      ))}\n    </>\n  );\n};\n\nconst renderParentBaseItemContent = (item: NavParentItem) => {\n  return (\n    <>\n      {item.icon && <item.icon className=\"opacity-70\" />}\n      {item.title}\n    </>\n  );\n};\n\nconst renderParentItemChildren = (current: string, item: NavParentItem) => {\n  return (\n    <>\n      {item.children.map(item => (\n        <Fragment key={isCustomItem(item) ? item.key : item.href}>\n          {isCustomItem(item)\n            ? <Fragment key={item.key}>{item.children}</Fragment>\n            : <SiteNavLinkItem key={item.href} item={item} active={isActive(current, item)} sub />}\n        </Fragment>\n      ))}\n    </>\n  );\n};\n\nconst renderBaseItemContent = (item: NavBaseItem) => {\n  return (\n    <>\n      {item.icon && <item.icon className=\"opacity-70\" />}\n      {item.title}\n    </>\n  );\n};\n\nfunction SiteParentItem ({ current, item, active }: { current: string, item: NavParentItem, active: boolean }) {\n  let el: ReactElement<any> = renderParentBaseItemContent(item);\n\n  if (item.disabled && typeof item.disabled !== 'boolean') {\n    el = (\n      <Tooltip>\n        <TooltipTrigger asChild disabled={!!item.disabled}>\n          {el}\n        </TooltipTrigger>\n        <TooltipContent>\n          {item.disabled}\n        </TooltipContent>\n      </Tooltip>\n    );\n  }\n\n  return (\n    <Collapsible defaultOpen={active} className=\"group\" asChild>\n      <SidebarMenuItem>\n        <CollapsibleTrigger asChild disabled={!!item.disabled}>\n          <SidebarMenuButton isActive={active}>\n            {el}\n          </SidebarMenuButton>\n        </CollapsibleTrigger>\n        <SidebarMenuBadge>\n          <ChevronDownIcon className=\"size-4 transition-transform 
group-data-[state=open]:rotate-180\" />\n        </SidebarMenuBadge>\n        <CollapsibleContent asChild>\n          <SidebarMenuSub>\n            {renderParentItemChildren(current, item)}\n          </SidebarMenuSub>\n        </CollapsibleContent>\n      </SidebarMenuItem>\n    </Collapsible>\n  );\n}\n\nfunction SiteNavLinkItem ({ item, active, sub = false }: { item: NavLinkItem, active: boolean, sub?: boolean }) {\n  let el: ReactElement<any>;\n  let badge: ReactNode | undefined;\n\n  if (!!item.disabled) {\n    el = renderBaseItemContent(item);\n  } else {\n    el = renderBaseItemContent(item);\n  }\n\n  if (item.details) {\n    badge = item.details;\n  }\n\n  if (item.onDelete) {\n    el = (\n      <div className=\"flex gap-2 items-center\">\n        <div className=\"flex-1 overflow-hidden text-ellipsis text-nowrap\">\n          {el}\n        </div>\n        <AlertDialog>\n          <AlertDialogTrigger asChild>\n            <Button className=\"flex-shrink-0 w-max h-max rounded-full p-1 hover:bg-transparent\" size=\"icon\" variant=\"ghost\" disabled={!!item.disabled}>\n              <TrashIcon className=\"w-3 h-3 opacity-20 hover:opacity-60\" />\n            </Button>\n          </AlertDialogTrigger>\n          <AlertDialogContent>\n            <AlertDialogHeader>\n              <AlertDialogTitle>Are you sure to delete {item.deleteResourceName}?</AlertDialogTitle>\n              <AlertDialogDescription>\n                This action cannot be undone.\n              </AlertDialogDescription>\n            </AlertDialogHeader>\n            <AlertDialogFooter>\n              <AlertDialogCancel>Cancel</AlertDialogCancel>\n              <AlertDialogAction onClick={item.onDelete}>Delete</AlertDialogAction>\n            </AlertDialogFooter>\n          </AlertDialogContent>\n        </AlertDialog>\n      </div>\n    );\n  }\n\n  const MenuItem = sub ? SidebarMenuSubItem : SidebarMenuItem;\n  const MenuButton = sub ? 
SidebarMenuSubButton : SidebarMenuButton;\n\n  el = (\n    <MenuButton asChild isActive={active} disabled={!!item.disabled}>\n      <Link href={item.href}>\n        {el}\n      </Link>\n    </MenuButton>\n  );\n\n  if (item.disabled && typeof item.disabled !== 'boolean') {\n    el = (\n      <Tooltip>\n        <TooltipTrigger asChild disabled={!!item.disabled}>\n          {el}\n        </TooltipTrigger>\n        <TooltipContent>\n          {item.disabled}\n        </TooltipContent>\n      </Tooltip>\n    );\n  }\n\n  return (\n    <MenuItem>\n      {el}\n      {badge && <SidebarMenuBadge className=\"pointer-events-auto\">{badge}</SidebarMenuBadge>}\n    </MenuItem>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/system/BootstrapStatusProvider.tsx",
    "content": "'use client';\n\nimport type { BootstrapStatus } from '@/api/system';\nimport { createContext, type ReactNode, useContext } from 'react';\n\nconst BootstrapStatusContext = createContext<BootstrapStatus>({\n  required: {\n    knowledge_base: false,\n    default_embedding_model: false,\n    default_chat_engine: false,\n    default_llm: false,\n  },\n  optional: {\n    langfuse: false,\n    default_reranker: false,\n  },\n  need_migration: {\n    chat_engines_without_kb_configured: [-1],\n  },\n});\n\nexport function BootstrapStatusProvider ({ bootstrapStatus, children }: { bootstrapStatus: BootstrapStatus, children: ReactNode }) {\n  return <BootstrapStatusContext.Provider value={bootstrapStatus}>{children}</BootstrapStatusContext.Provider>;\n}\n\nexport function useBootstrapStatus () {\n  return useContext(BootstrapStatusContext);\n}"
  },
  {
    "path": "frontend/app/src/components/system/SystemWizardBanner.tsx",
    "content": "'use client';\n\nimport { isBootstrapStatusPassed } from '@/api/system';\nimport { useBootstrapStatus } from './BootstrapStatusProvider';\n\nexport function SystemWizardBanner () {\n  const bootstrapStatus = useBootstrapStatus();\n  const configured = isBootstrapStatusPassed(bootstrapStatus);\n\n  if (!configured) {\n    return (\n      <div className=\"absolute left-0 top-0 w-full p-1 text-xs text-center bg-warning/10 text-warning\">\n        This site is not ready to use yet. Please login or contact admin to finish setup configuration.\n      </div>\n    );\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/components/theme-toggle.tsx",
    "content": "'use client';\n\nimport { Button } from '@/components/ui/button';\nimport { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuTrigger } from '@/components/ui/dropdown-menu';\nimport { Moon, Sun } from 'lucide-react';\nimport { useTheme } from 'next-themes';\nimport * as React from 'react';\n\nexport function ThemeToggle () {\n  const { setTheme } = useTheme();\n\n  return (\n    <DropdownMenu>\n      <DropdownMenuTrigger asChild>\n        <Button variant=\"ghost\" size=\"icon\" className='text-foreground rounded-full'>\n          <Sun className=\"h-[1.2rem] w-[1.2rem] rotate-0 scale-100 transition-all dark:-rotate-90 dark:scale-0\" />\n          <Moon className=\"absolute h-[1.2rem] w-[1.2rem] rotate-90 scale-0 transition-all dark:rotate-0 dark:scale-100\" />\n          <span className=\"sr-only\">Toggle theme</span>\n        </Button>\n      </DropdownMenuTrigger>\n      <DropdownMenuContent align=\"end\" collisionPadding={8}>\n        <DropdownMenuItem onClick={() => setTheme('light')}>\n          Light\n        </DropdownMenuItem>\n        <DropdownMenuItem onClick={() => setTheme('dark')}>\n          Dark\n        </DropdownMenuItem>\n        <DropdownMenuItem onClick={() => setTheme('system')}>\n          System\n        </DropdownMenuItem>\n      </DropdownMenuContent>\n    </DropdownMenu>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/theme.stories.tsx",
    "content": "import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';\nimport { Button } from '@/components/ui/button';\nimport { Toaster } from '@/components/ui/sonner';\nimport { cn } from '@/lib/utils';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport { MailQuestionIcon } from 'lucide-react';\nimport type { ComponentProps } from 'react';\nimport { toast } from 'sonner';\n\nconst meta = {\n  title: 'Theme',\n  render () {\n    return (\n      <div className=\"space-y-5\">\n        <section>\n          <ColorExample name=\"Warning\" className=\"text-warning\" />\n          <ColorExample name=\"Info\" className=\"text-info\" />\n          <ColorExample name=\"Success\" className=\"text-success\" />\n        </section>\n        <section>\n          <AlertExample name=\"default\" />\n          <AlertExample name=\"destructive\" />\n          <AlertExample name=\"warning\" />\n          <AlertExample name=\"info\" />\n          <AlertExample name=\"success\" />\n        </section>\n        <section>\n          <Toaster cn={cn} />\n          <Button\n            onClick={() => {\n              toast.success('Success', {\n                description: 'Description is here',\n                icon: <MailQuestionIcon className=\"size-4\" />,\n              });\n            }}>\n            Success\n          </Button>\n          <Button\n            onClick={() => {\n              toast.warning('Warning', {\n                description: 'Description is here',\n                icon: <MailQuestionIcon className=\"size-4\" />,\n              });\n            }}>\n            Warning\n          </Button>\n          <Button\n            onClick={() => {\n              toast.info('Info', {\n                description: 'Description is here',\n                icon: <MailQuestionIcon className=\"size-4\" />,\n              });\n            }}>\n            Info\n          </Button>\n          <Button\n            onClick={() => {\n             
 toast.error('Error', {\n                description: 'Description is here',\n                icon: <MailQuestionIcon className=\"size-4\" />,\n              });\n            }}>\n            Error\n          </Button>\n        </section>\n      </div>\n    );\n  },\n} satisfies Meta;\n\nexport default meta;\n\nexport const Default = {} satisfies StoryObj<typeof meta>;\n\nfunction ColorExample ({ name, className }: { name: string, className: string }) {\n  return (\n    <div className=\"space-y-2\">\n      <h6 className=\"font-semibold\">{name}</h6>\n      <div className=\"flex gap-4 items-center\">\n        <div className=\"bg-background p-4\">\n        <span className={cn('flex gap-2 items-center p-2 border border-current rounded', className)}>\n          <span>\n            {className}\n          </span>\n          <span className=\"bg-current size-5 rounded\" />\n        </span>\n        </div>\n        <div className=\"dark bg-background p-4 rounded-lg\">\n        <span className={cn('flex gap-2 items-center p-2 border border-current rounded', className)}>\n          <span>\n            {className}\n          </span>\n          <span className=\"bg-current size-5 rounded\" />\n        </span>\n        </div>\n      </div>\n    </div>\n  );\n}\n\nfunction AlertExample ({ name }: { name: ComponentProps<typeof Alert>['variant'] }) {\n  return (\n    <div className=\"space-y-2\">\n      <h6 className=\"font-semibold\">{name}</h6>\n      <div className=\"flex gap-4 items-center\">\n        <div className=\"bg-background p-4\">\n          <Alert variant={name}>\n            <MailQuestionIcon />\n            <AlertTitle>Title</AlertTitle>\n            <AlertDescription>Description</AlertDescription>\n          </Alert>\n        </div>\n        <div className=\"dark bg-background p-4 rounded-lg\">\n          <Alert variant={name}>\n            <MailQuestionIcon />\n            <AlertTitle>Title</AlertTitle>\n            
<AlertDescription>Description</AlertDescription>\n          </Alert>\n        </div>\n      </div>\n    </div>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/components/themed-style.ts",
    "content": "'use client';\n\nimport { type AvailableTheme, useActiveTheme } from '@/components/use-active-theme';\nimport { cloneElement, type CSSProperties, type ReactElement } from 'react';\n\nexport function ThemedStyle ({ children, ...themes }: { children: ReactElement<{ style?: CSSProperties }> } & Record<AvailableTheme, CSSProperties>) {\n  const theme = useActiveTheme();\n\n  return cloneElement(children, {\n    style: {\n      ...themes[theme],\n      ...children.props.style,\n    },\n  });\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/accordion.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as AccordionPrimitive from \"@radix-ui/react-accordion\"\nimport { ChevronDown } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Accordion = AccordionPrimitive.Root\n\nconst AccordionItem = React.forwardRef<\n  React.ElementRef<typeof AccordionPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Item>\n>(({ className, ...props }, ref) => (\n  <AccordionPrimitive.Item\n    ref={ref}\n    className={cn(\"border-b\", className)}\n    {...props}\n  />\n))\nAccordionItem.displayName = \"AccordionItem\"\n\nconst AccordionTrigger = React.forwardRef<\n  React.ElementRef<typeof AccordionPrimitive.Trigger>,\n  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Trigger>\n>(({ className, children, ...props }, ref) => (\n  <AccordionPrimitive.Header className=\"flex\">\n    <AccordionPrimitive.Trigger\n      ref={ref}\n      className={cn(\n        \"flex flex-1 items-center justify-between py-4 font-medium transition-all [&[data-state=open]>svg]:rotate-180\",\n        props.disabled ? 
'text-muted-foreground cursor-not-allowed' : 'hover:underline',\n        className\n      )}\n      {...props}\n    >\n      {children}\n      <ChevronDown className=\"h-4 w-4 shrink-0 transition-transform duration-200\" />\n    </AccordionPrimitive.Trigger>\n  </AccordionPrimitive.Header>\n))\nAccordionTrigger.displayName = AccordionPrimitive.Trigger.displayName\n\nconst AccordionContent = React.forwardRef<\n  React.ElementRef<typeof AccordionPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof AccordionPrimitive.Content>\n>(({ className, children, ...props }, ref) => (\n  <AccordionPrimitive.Content\n    ref={ref}\n    className=\"overflow-hidden text-sm transition-all data-[state=closed]:animate-accordion-up data-[state=open]:animate-accordion-down\"\n    {...props}\n  >\n    <div className={cn(\"pb-4 pt-0\", className)}>{children}</div>\n  </AccordionPrimitive.Content>\n))\n\nAccordionContent.displayName = AccordionPrimitive.Content.displayName\n\nexport { Accordion, AccordionItem, AccordionTrigger, AccordionContent }\n"
  },
  {
    "path": "frontend/app/src/components/ui/alert-dialog.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as AlertDialogPrimitive from \"@radix-ui/react-alert-dialog\"\n\nimport { cn } from \"@/lib/utils\"\nimport { buttonVariants } from \"@/components/ui/button\"\n\nconst AlertDialog = AlertDialogPrimitive.Root\n\nconst AlertDialogTrigger = AlertDialogPrimitive.Trigger\n\nconst AlertDialogPortal = AlertDialogPrimitive.Portal\n\nconst AlertDialogOverlay = React.forwardRef<\n  React.ElementRef<typeof AlertDialogPrimitive.Overlay>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Overlay>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPrimitive.Overlay\n    className={cn(\n      \"fixed inset-0 z-50 bg-black/80  data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0\",\n      className\n    )}\n    {...props}\n    ref={ref}\n  />\n))\nAlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName\n\nconst AlertDialogContent = React.forwardRef<\n  React.ElementRef<typeof AlertDialogPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Content>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPortal>\n    <AlertDialogOverlay />\n    <AlertDialogPrimitive.Content\n      ref={ref}\n      className={cn(\n        \"fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border bg-background p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg\",\n        className\n      )}\n      {...props}\n    />\n  </AlertDialogPortal>\n))\nAlertDialogContent.displayName = 
AlertDialogPrimitive.Content.displayName\n\nconst AlertDialogHeader = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col space-y-2 text-center sm:text-left\",\n      className\n    )}\n    {...props}\n  />\n)\nAlertDialogHeader.displayName = \"AlertDialogHeader\"\n\nconst AlertDialogFooter = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2\",\n      className\n    )}\n    {...props}\n  />\n)\nAlertDialogFooter.displayName = \"AlertDialogFooter\"\n\nconst AlertDialogTitle = React.forwardRef<\n  React.ElementRef<typeof AlertDialogPrimitive.Title>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Title>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPrimitive.Title\n    ref={ref}\n    className={cn(\"text-lg font-semibold\", className)}\n    {...props}\n  />\n))\nAlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName\n\nconst AlertDialogDescription = React.forwardRef<\n  React.ElementRef<typeof AlertDialogPrimitive.Description>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Description>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPrimitive.Description\n    ref={ref}\n    className={cn(\"text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nAlertDialogDescription.displayName =\n  AlertDialogPrimitive.Description.displayName\n\nconst AlertDialogAction = React.forwardRef<\n  React.ElementRef<typeof AlertDialogPrimitive.Action>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Action>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPrimitive.Action\n    ref={ref}\n    className={cn(buttonVariants(), className)}\n    {...props}\n  />\n))\nAlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName\n\nconst AlertDialogCancel = React.forwardRef<\n  
React.ElementRef<typeof AlertDialogPrimitive.Cancel>,\n  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Cancel>\n>(({ className, ...props }, ref) => (\n  <AlertDialogPrimitive.Cancel\n    ref={ref}\n    className={cn(\n      buttonVariants({ variant: \"outline\" }),\n      \"mt-2 sm:mt-0\",\n      className\n    )}\n    {...props}\n  />\n))\nAlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName\n\nexport {\n  AlertDialog,\n  AlertDialogPortal,\n  AlertDialogOverlay,\n  AlertDialogTrigger,\n  AlertDialogContent,\n  AlertDialogHeader,\n  AlertDialogFooter,\n  AlertDialogTitle,\n  AlertDialogDescription,\n  AlertDialogAction,\n  AlertDialogCancel,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/alert.tsx",
    "content": "import * as React from \"react\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst alertVariants = cva(\n  \"relative w-full rounded-lg border p-4 [&>svg~*]:pl-7 [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-foreground\",\n  {\n    variants: {\n      variant: {\n        default: \"bg-background text-foreground\",\n        destructive:\n          \"border-destructive/50 text-destructive [&>svg]:text-destructive\",\n        warning:\n          \"border-warning/30 text-warning [&>svg]:text-warning\",\n        success:\n          \"border-success/30 text-success [&>svg]:text-success\",\n        info:\n          \"border-info/30 text-info [&>svg]:text-info\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n    },\n  }\n)\n\nconst Alert = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement> & VariantProps<typeof alertVariants>\n>(({ className, variant, ...props }, ref) => (\n  <div\n    ref={ref}\n    role=\"alert\"\n    className={cn(alertVariants({ variant }), className)}\n    {...props}\n  />\n))\nAlert.displayName = \"Alert\"\n\nconst AlertTitle = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLHeadingElement>\n>(({ className, ...props }, ref) => (\n  <h5\n    ref={ref}\n    className={cn(\"mb-1 font-medium leading-none tracking-tight\", className)}\n    {...props}\n  />\n))\nAlertTitle.displayName = \"AlertTitle\"\n\nconst AlertDescription = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLParagraphElement>\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    className={cn(\"text-sm [&_p]:leading-relaxed\", className)}\n    {...props}\n  />\n))\nAlertDescription.displayName = \"AlertDescription\"\n\nexport { Alert, AlertTitle, AlertDescription }\n"
  },
  {
    "path": "frontend/app/src/components/ui/aspect-ratio.tsx",
    "content": "\"use client\"\n\nimport * as AspectRatioPrimitive from \"@radix-ui/react-aspect-ratio\"\n\nconst AspectRatio = AspectRatioPrimitive.Root\n\nexport { AspectRatio }\n"
  },
  {
    "path": "frontend/app/src/components/ui/avatar.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as AvatarPrimitive from \"@radix-ui/react-avatar\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Avatar = React.forwardRef<\n  React.ElementRef<typeof AvatarPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Root>\n>(({ className, ...props }, ref) => (\n  <AvatarPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"relative flex h-10 w-10 shrink-0 overflow-hidden rounded-full\",\n      className\n    )}\n    {...props}\n  />\n))\nAvatar.displayName = AvatarPrimitive.Root.displayName\n\nconst AvatarImage = React.forwardRef<\n  React.ElementRef<typeof AvatarPrimitive.Image>,\n  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Image>\n>(({ className, ...props }, ref) => (\n  <AvatarPrimitive.Image\n    ref={ref}\n    className={cn(\"aspect-square h-full w-full\", className)}\n    {...props}\n  />\n))\nAvatarImage.displayName = AvatarPrimitive.Image.displayName\n\nconst AvatarFallback = React.forwardRef<\n  React.ElementRef<typeof AvatarPrimitive.Fallback>,\n  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Fallback>\n>(({ className, ...props }, ref) => (\n  <AvatarPrimitive.Fallback\n    ref={ref}\n    className={cn(\n      \"flex h-full w-full items-center justify-center rounded-full bg-muted\",\n      className\n    )}\n    {...props}\n  />\n))\nAvatarFallback.displayName = AvatarPrimitive.Fallback.displayName\n\nexport { Avatar, AvatarImage, AvatarFallback }\n"
  },
  {
    "path": "frontend/app/src/components/ui/badge.tsx",
    "content": "import * as React from \"react\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst badgeVariants = cva(\n  \"inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2\",\n  {\n    variants: {\n      variant: {\n        default:\n          \"border-transparent bg-primary text-primary-foreground hover:bg-primary/80\",\n        secondary:\n          \"border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80\",\n        destructive:\n          \"border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80\",\n        outline: \"text-foreground\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n    },\n  }\n)\n\nexport interface BadgeProps\n  extends React.HTMLAttributes<HTMLDivElement>,\n    VariantProps<typeof badgeVariants> {}\n\nfunction Badge({ className, variant, ...props }: BadgeProps) {\n  return (\n    <div className={cn(badgeVariants({ variant }), className)} {...props} />\n  )\n}\n\nexport { Badge, badgeVariants }\n"
  },
  {
    "path": "frontend/app/src/components/ui/breadcrumb.tsx",
    "content": "import * as React from \"react\"\nimport { Slot } from \"@radix-ui/react-slot\"\nimport { ChevronRight, MoreHorizontal } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Breadcrumb = React.forwardRef<\n  HTMLElement,\n  React.ComponentPropsWithoutRef<\"nav\"> & {\n    separator?: React.ReactNode\n  }\n>(({ ...props }, ref) => <nav ref={ref} aria-label=\"breadcrumb\" {...props} />)\nBreadcrumb.displayName = \"Breadcrumb\"\n\nconst BreadcrumbList = React.forwardRef<\n  HTMLOListElement,\n  React.ComponentPropsWithoutRef<\"ol\">\n>(({ className, ...props }, ref) => (\n  <ol\n    ref={ref}\n    className={cn(\n      \"flex flex-wrap items-center gap-1.5 break-words text-sm text-muted-foreground sm:gap-2.5\",\n      className\n    )}\n    {...props}\n  />\n))\nBreadcrumbList.displayName = \"BreadcrumbList\"\n\nconst BreadcrumbItem = React.forwardRef<\n  HTMLLIElement,\n  React.ComponentPropsWithoutRef<\"li\">\n>(({ className, ...props }, ref) => (\n  <li\n    ref={ref}\n    className={cn(\"inline-flex items-center gap-1.5\", className)}\n    {...props}\n  />\n))\nBreadcrumbItem.displayName = \"BreadcrumbItem\"\n\nconst BreadcrumbLink = React.forwardRef<\n  HTMLAnchorElement,\n  React.ComponentPropsWithoutRef<\"a\"> & {\n    asChild?: boolean\n  }\n>(({ asChild, className, ...props }, ref) => {\n  const Comp = asChild ? 
Slot : \"a\"\n\n  return (\n    <Comp\n      ref={ref}\n      className={cn(\"transition-colors hover:text-foreground\", className)}\n      {...props}\n    />\n  )\n})\nBreadcrumbLink.displayName = \"BreadcrumbLink\"\n\nconst BreadcrumbPage = React.forwardRef<\n  HTMLSpanElement,\n  React.ComponentPropsWithoutRef<\"span\">\n>(({ className, ...props }, ref) => (\n  <span\n    ref={ref}\n    role=\"link\"\n    aria-disabled=\"true\"\n    aria-current=\"page\"\n    className={cn(\"font-normal text-foreground\", className)}\n    {...props}\n  />\n))\nBreadcrumbPage.displayName = \"BreadcrumbPage\"\n\nconst BreadcrumbSeparator = ({\n  children,\n  className,\n  ...props\n}: React.ComponentProps<\"li\">) => (\n  <li\n    role=\"presentation\"\n    aria-hidden=\"true\"\n    className={cn(\"[&>svg]:size-3.5\", className)}\n    {...props}\n  >\n    {children ?? <ChevronRight />}\n  </li>\n)\nBreadcrumbSeparator.displayName = \"BreadcrumbSeparator\"\n\nconst BreadcrumbEllipsis = ({\n  className,\n  ...props\n}: React.ComponentProps<\"span\">) => (\n  <span\n    role=\"presentation\"\n    aria-hidden=\"true\"\n    className={cn(\"flex h-9 w-9 items-center justify-center\", className)}\n    {...props}\n  >\n    <MoreHorizontal className=\"h-4 w-4\" />\n    <span className=\"sr-only\">More</span>\n  </span>\n)\nBreadcrumbEllipsis.displayName = \"BreadcrumbElipssis\"\n\nexport {\n  Breadcrumb,\n  BreadcrumbList,\n  BreadcrumbItem,\n  BreadcrumbLink,\n  BreadcrumbPage,\n  BreadcrumbSeparator,\n  BreadcrumbEllipsis,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/button.tsx",
    "content": "import * as React from \"react\"\nimport { Slot } from \"@radix-ui/react-slot\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst buttonVariants = cva(\n  \"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0\",\n  {\n    variants: {\n      variant: {\n        default: \"bg-primary text-primary-foreground hover:bg-primary/90\",\n        destructive:\n          \"bg-destructive text-destructive-foreground hover:bg-destructive/90\",\n        outline:\n          \"border border-input bg-background hover:bg-accent hover:text-accent-foreground\",\n        secondary:\n          \"bg-secondary text-secondary-foreground hover:bg-secondary/80\",\n        ghost: \"hover:bg-accent hover:text-accent-foreground\",\n        link: \"text-primary underline-offset-4 hover:underline\",\n      },\n      size: {\n        default: \"h-10 px-4 py-2\",\n        sm: \"h-9 rounded-md px-3\",\n        lg: \"h-11 rounded-md px-8\",\n        icon: \"h-10 w-10\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n      size: \"default\",\n    },\n  }\n)\n\nexport interface ButtonProps\n  extends React.ButtonHTMLAttributes<HTMLButtonElement>,\n    VariantProps<typeof buttonVariants> {\n  asChild?: boolean\n}\n\nconst Button = React.forwardRef<HTMLButtonElement, ButtonProps>(\n  ({ className, variant, size, asChild = false, ...props }, ref) => {\n    const Comp = asChild ? 
Slot : \"button\"\n    return (\n      <Comp\n        className={cn(buttonVariants({ variant, size, className }))}\n        ref={ref}\n        {...props}\n      />\n    )\n  }\n)\nButton.displayName = \"Button\"\n\nexport { Button, buttonVariants }\n"
  },
  {
    "path": "frontend/app/src/components/ui/calendar.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport { ChevronLeft, ChevronRight } from \"lucide-react\"\nimport { DayPicker } from \"react-day-picker\"\n\nimport { cn } from \"@/lib/utils\"\nimport { buttonVariants } from \"@/components/ui/button\"\n\nexport type CalendarProps = React.ComponentProps<typeof DayPicker>\n\nfunction Calendar({\n  className,\n  classNames,\n  showOutsideDays = true,\n  ...props\n}: CalendarProps) {\n  return (\n    <DayPicker\n      showOutsideDays={showOutsideDays}\n      className={cn(\"p-3\", className)}\n      classNames={{\n        months: \"flex flex-col sm:flex-row space-y-4 sm:space-x-4 sm:space-y-0\",\n        month: \"space-y-4\",\n        caption: \"flex justify-center pt-1 relative items-center\",\n        caption_label: \"text-sm font-medium\",\n        nav: \"space-x-1 flex items-center\",\n        nav_button: cn(\n          buttonVariants({ variant: \"outline\" }),\n          \"h-7 w-7 bg-transparent p-0 opacity-50 hover:opacity-100\"\n        ),\n        nav_button_previous: \"absolute left-1\",\n        nav_button_next: \"absolute right-1\",\n        table: \"w-full border-collapse space-y-1\",\n        head_row: \"flex\",\n        head_cell:\n          \"text-muted-foreground rounded-md w-9 font-normal text-[0.8rem]\",\n        row: \"flex w-full mt-2\",\n        cell: \"h-9 w-9 text-center text-sm p-0 relative [&:has([aria-selected].day-range-end)]:rounded-r-md [&:has([aria-selected].day-outside)]:bg-accent/50 [&:has([aria-selected])]:bg-accent first:[&:has([aria-selected])]:rounded-l-md last:[&:has([aria-selected])]:rounded-r-md focus-within:relative focus-within:z-20\",\n        day: cn(\n          buttonVariants({ variant: \"ghost\" }),\n          \"h-9 w-9 p-0 font-normal aria-selected:opacity-100\"\n        ),\n        day_range_end: \"day-range-end\",\n        day_selected:\n          \"bg-primary text-primary-foreground hover:bg-primary hover:text-primary-foreground focus:bg-primary 
focus:text-primary-foreground\",\n        day_today: \"bg-accent text-accent-foreground\",\n        day_outside:\n          \"day-outside text-muted-foreground aria-selected:bg-accent/50 aria-selected:text-muted-foreground\",\n        day_disabled: \"text-muted-foreground opacity-50\",\n        day_range_middle:\n          \"aria-selected:bg-accent aria-selected:text-accent-foreground\",\n        day_hidden: \"invisible\",\n        ...classNames,\n      }}\n      components={{\n        IconLeft: ({ className, ...props }) => (\n          <ChevronLeft className={cn(\"h-4 w-4\", className)} {...props} />\n        ),\n        IconRight: ({ className, ...props }) => (\n          <ChevronRight className={cn(\"h-4 w-4\", className)} {...props} />\n        ),\n      }}\n      {...props}\n    />\n  )\n}\nCalendar.displayName = \"Calendar\"\n\nexport { Calendar }\n"
  },
  {
    "path": "frontend/app/src/components/ui/card.tsx",
    "content": "import * as React from \"react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Card = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    className={cn(\n      \"rounded-lg border bg-card text-card-foreground shadow-sm\",\n      className\n    )}\n    {...props}\n  />\n))\nCard.displayName = \"Card\"\n\nconst CardHeader = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    className={cn(\"flex flex-col space-y-1.5 p-6\", className)}\n    {...props}\n  />\n))\nCardHeader.displayName = \"CardHeader\"\n\nconst CardTitle = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLHeadingElement>\n>(({ className, ...props }, ref) => (\n  <h3\n    ref={ref}\n    className={cn(\n      \"text-2xl font-semibold leading-none tracking-tight\",\n      className\n    )}\n    {...props}\n  />\n))\nCardTitle.displayName = \"CardTitle\"\n\nconst CardDescription = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLParagraphElement>\n>(({ className, ...props }, ref) => (\n  <p\n    ref={ref}\n    className={cn(\"text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nCardDescription.displayName = \"CardDescription\"\n\nconst CardContent = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => (\n  <div ref={ref} className={cn(\"p-6 pt-0\", className)} {...props} />\n))\nCardContent.displayName = \"CardContent\"\n\nconst CardFooter = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    className={cn(\"flex items-center p-6 pt-0\", className)}\n    {...props}\n  />\n))\nCardFooter.displayName = \"CardFooter\"\n\nexport { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent 
}\n"
  },
  {
    "path": "frontend/app/src/components/ui/carousel.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport useEmblaCarousel, {\n  type UseEmblaCarouselType,\n} from \"embla-carousel-react\"\nimport { ArrowLeft, ArrowRight } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\nimport { Button } from \"@/components/ui/button\"\n\ntype CarouselApi = UseEmblaCarouselType[1]\ntype UseCarouselParameters = Parameters<typeof useEmblaCarousel>\ntype CarouselOptions = UseCarouselParameters[0]\ntype CarouselPlugin = UseCarouselParameters[1]\n\ntype CarouselProps = {\n  opts?: CarouselOptions\n  plugins?: CarouselPlugin\n  orientation?: \"horizontal\" | \"vertical\"\n  setApi?: (api: CarouselApi) => void\n}\n\ntype CarouselContextProps = {\n  carouselRef: ReturnType<typeof useEmblaCarousel>[0]\n  api: ReturnType<typeof useEmblaCarousel>[1]\n  scrollPrev: () => void\n  scrollNext: () => void\n  canScrollPrev: boolean\n  canScrollNext: boolean\n} & CarouselProps\n\nconst CarouselContext = React.createContext<CarouselContextProps | null>(null)\n\nfunction useCarousel() {\n  const context = React.useContext(CarouselContext)\n\n  if (!context) {\n    throw new Error(\"useCarousel must be used within a <Carousel />\")\n  }\n\n  return context\n}\n\nconst Carousel = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement> & CarouselProps\n>(\n  (\n    {\n      orientation = \"horizontal\",\n      opts,\n      setApi,\n      plugins,\n      className,\n      children,\n      ...props\n    },\n    ref\n  ) => {\n    const [carouselRef, api] = useEmblaCarousel(\n      {\n        ...opts,\n        axis: orientation === \"horizontal\" ? 
\"x\" : \"y\",\n      },\n      plugins\n    )\n    const [canScrollPrev, setCanScrollPrev] = React.useState(false)\n    const [canScrollNext, setCanScrollNext] = React.useState(false)\n\n    const onSelect = React.useCallback((api: CarouselApi) => {\n      if (!api) {\n        return\n      }\n\n      setCanScrollPrev(api.canScrollPrev())\n      setCanScrollNext(api.canScrollNext())\n    }, [])\n\n    const scrollPrev = React.useCallback(() => {\n      api?.scrollPrev()\n    }, [api])\n\n    const scrollNext = React.useCallback(() => {\n      api?.scrollNext()\n    }, [api])\n\n    const handleKeyDown = React.useCallback(\n      (event: React.KeyboardEvent<HTMLDivElement>) => {\n        if (event.key === \"ArrowLeft\") {\n          event.preventDefault()\n          scrollPrev()\n        } else if (event.key === \"ArrowRight\") {\n          event.preventDefault()\n          scrollNext()\n        }\n      },\n      [scrollPrev, scrollNext]\n    )\n\n    React.useEffect(() => {\n      if (!api || !setApi) {\n        return\n      }\n\n      setApi(api)\n    }, [api, setApi])\n\n    React.useEffect(() => {\n      if (!api) {\n        return\n      }\n\n      onSelect(api)\n      api.on(\"reInit\", onSelect)\n      api.on(\"select\", onSelect)\n\n      return () => {\n        api?.off(\"select\", onSelect)\n      }\n    }, [api, onSelect])\n\n    return (\n      <CarouselContext.Provider\n        value={{\n          carouselRef,\n          api: api,\n          opts,\n          orientation:\n            orientation || (opts?.axis === \"y\" ? 
\"vertical\" : \"horizontal\"),\n          scrollPrev,\n          scrollNext,\n          canScrollPrev,\n          canScrollNext,\n        }}\n      >\n        <div\n          ref={ref}\n          onKeyDownCapture={handleKeyDown}\n          className={cn(\"relative\", className)}\n          role=\"region\"\n          aria-roledescription=\"carousel\"\n          {...props}\n        >\n          {children}\n        </div>\n      </CarouselContext.Provider>\n    )\n  }\n)\nCarousel.displayName = \"Carousel\"\n\nconst CarouselContent = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => {\n  const { carouselRef, orientation } = useCarousel()\n\n  return (\n    <div ref={carouselRef} className=\"overflow-hidden\">\n      <div\n        ref={ref}\n        className={cn(\n          \"flex\",\n          orientation === \"horizontal\" ? \"-ml-4\" : \"-mt-4 flex-col\",\n          className\n        )}\n        {...props}\n      />\n    </div>\n  )\n})\nCarouselContent.displayName = \"CarouselContent\"\n\nconst CarouselItem = React.forwardRef<\n  HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => {\n  const { orientation } = useCarousel()\n\n  return (\n    <div\n      ref={ref}\n      role=\"group\"\n      aria-roledescription=\"slide\"\n      className={cn(\n        \"min-w-0 shrink-0 grow-0 basis-full\",\n        orientation === \"horizontal\" ? 
\"pl-4\" : \"pt-4\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nCarouselItem.displayName = \"CarouselItem\"\n\nconst CarouselPrevious = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<typeof Button>\n>(({ className, variant = \"outline\", size = \"icon\", ...props }, ref) => {\n  const { orientation, scrollPrev, canScrollPrev } = useCarousel()\n\n  return (\n    <Button\n      ref={ref}\n      variant={variant}\n      size={size}\n      className={cn(\n        \"absolute  h-8 w-8 rounded-full\",\n        orientation === \"horizontal\"\n          ? \"-left-12 top-1/2 -translate-y-1/2\"\n          : \"-top-12 left-1/2 -translate-x-1/2 rotate-90\",\n        className\n      )}\n      disabled={!canScrollPrev}\n      onClick={scrollPrev}\n      {...props}\n    >\n      <ArrowLeft className=\"h-4 w-4\" />\n      <span className=\"sr-only\">Previous slide</span>\n    </Button>\n  )\n})\nCarouselPrevious.displayName = \"CarouselPrevious\"\n\nconst CarouselNext = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<typeof Button>\n>(({ className, variant = \"outline\", size = \"icon\", ...props }, ref) => {\n  const { orientation, scrollNext, canScrollNext } = useCarousel()\n\n  return (\n    <Button\n      ref={ref}\n      variant={variant}\n      size={size}\n      className={cn(\n        \"absolute h-8 w-8 rounded-full\",\n        orientation === \"horizontal\"\n          ? \"-right-12 top-1/2 -translate-y-1/2\"\n          : \"-bottom-12 left-1/2 -translate-x-1/2 rotate-90\",\n        className\n      )}\n      disabled={!canScrollNext}\n      onClick={scrollNext}\n      {...props}\n    >\n      <ArrowRight className=\"h-4 w-4\" />\n      <span className=\"sr-only\">Next slide</span>\n    </Button>\n  )\n})\nCarouselNext.displayName = \"CarouselNext\"\n\nexport {\n  type CarouselApi,\n  Carousel,\n  CarouselContent,\n  CarouselItem,\n  CarouselPrevious,\n  CarouselNext,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/chart.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as RechartsPrimitive from \"recharts\"\n\nimport { cn } from \"@/lib/utils\"\n\n// Format: { THEME_NAME: CSS_SELECTOR }\nconst THEMES = { light: \"\", dark: \".dark\" } as const\n\nexport type ChartConfig = {\n  [k in string]: {\n    label?: React.ReactNode\n    icon?: React.ComponentType\n  } & (\n    | { color?: string; theme?: never }\n    | { color?: never; theme: Record<keyof typeof THEMES, string> }\n  )\n}\n\ntype ChartContextProps = {\n  config: ChartConfig\n}\n\nconst ChartContext = React.createContext<ChartContextProps | null>(null)\n\nfunction useChart() {\n  const context = React.useContext(ChartContext)\n\n  if (!context) {\n    throw new Error(\"useChart must be used within a <ChartContainer />\")\n  }\n\n  return context\n}\n\nconst ChartContainer = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> & {\n    config: ChartConfig\n    children: React.ComponentProps<\n      typeof RechartsPrimitive.ResponsiveContainer\n    >[\"children\"]\n  }\n>(({ id, className, children, config, ...props }, ref) => {\n  const uniqueId = React.useId()\n  const chartId = `chart-${id || uniqueId.replace(/:/g, \"\")}`\n\n  return (\n    <ChartContext.Provider value={{ config }}>\n      <div\n        data-chart={chartId}\n        ref={ref}\n        className={cn(\n          \"flex aspect-video justify-center text-xs [&_.recharts-cartesian-axis-tick_text]:fill-muted-foreground [&_.recharts-cartesian-grid_line[stroke='#ccc']]:stroke-border/50 [&_.recharts-curve.recharts-tooltip-cursor]:stroke-border [&_.recharts-dot[stroke='#fff']]:stroke-transparent [&_.recharts-layer]:outline-none [&_.recharts-polar-grid_[stroke='#ccc']]:stroke-border [&_.recharts-radial-bar-background-sector]:fill-muted [&_.recharts-rectangle.recharts-tooltip-cursor]:fill-muted [&_.recharts-reference-line_[stroke='#ccc']]:stroke-border [&_.recharts-sector[stroke='#fff']]:stroke-transparent 
[&_.recharts-sector]:outline-none [&_.recharts-surface]:outline-none\",\n          className\n        )}\n        {...props}\n      >\n        <ChartStyle id={chartId} config={config} />\n        <RechartsPrimitive.ResponsiveContainer>\n          {children}\n        </RechartsPrimitive.ResponsiveContainer>\n      </div>\n    </ChartContext.Provider>\n  )\n})\nChartContainer.displayName = \"Chart\"\n\nconst ChartStyle = ({ id, config }: { id: string; config: ChartConfig }) => {\n  const colorConfig = Object.entries(config).filter(\n    ([_, config]) => config.theme || config.color\n  )\n\n  if (!colorConfig.length) {\n    return null\n  }\n\n  return (\n    <style\n      dangerouslySetInnerHTML={{\n        __html: Object.entries(THEMES)\n          .map(\n            ([theme, prefix]) => `\n${prefix} [data-chart=${id}] {\n${colorConfig\n  .map(([key, itemConfig]) => {\n    const color =\n      itemConfig.theme?.[theme as keyof typeof itemConfig.theme] ||\n      itemConfig.color\n    return color ? 
`  --color-${key}: ${color};` : null\n  })\n  .join(\"\\n\")}\n}\n`\n          )\n          .join(\"\\n\"),\n      }}\n    />\n  )\n}\n\nconst ChartTooltip = RechartsPrimitive.Tooltip\n\nconst ChartTooltipContent = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<typeof RechartsPrimitive.Tooltip> &\n    React.ComponentProps<\"div\"> & {\n      hideLabel?: boolean\n      hideIndicator?: boolean\n      indicator?: \"line\" | \"dot\" | \"dashed\"\n      nameKey?: string\n      labelKey?: string\n    }\n>(\n  (\n    {\n      active,\n      payload,\n      className,\n      indicator = \"dot\",\n      hideLabel = false,\n      hideIndicator = false,\n      label,\n      labelFormatter,\n      labelClassName,\n      formatter,\n      color,\n      nameKey,\n      labelKey,\n    },\n    ref\n  ) => {\n    const { config } = useChart()\n\n    const tooltipLabel = React.useMemo(() => {\n      if (hideLabel || !payload?.length) {\n        return null\n      }\n\n      const [item] = payload\n      const key = `${labelKey || item.dataKey || item.name || \"value\"}`\n      const itemConfig = getPayloadConfigFromPayload(config, item, key)\n      const value =\n        !labelKey && typeof label === \"string\"\n          ? 
config[label as keyof typeof config]?.label || label\n          : itemConfig?.label\n\n      if (labelFormatter) {\n        return (\n          <div className={cn(\"font-medium\", labelClassName)}>\n            {labelFormatter(value, payload)}\n          </div>\n        )\n      }\n\n      if (!value) {\n        return null\n      }\n\n      return <div className={cn(\"font-medium\", labelClassName)}>{value}</div>\n    }, [\n      label,\n      labelFormatter,\n      payload,\n      hideLabel,\n      labelClassName,\n      config,\n      labelKey,\n    ])\n\n    if (!active || !payload?.length) {\n      return null\n    }\n\n    const nestLabel = payload.length === 1 && indicator !== \"dot\"\n\n    return (\n      <div\n        ref={ref}\n        className={cn(\n          \"grid min-w-[8rem] items-start gap-1.5 rounded-lg border border-border/50 bg-background px-2.5 py-1.5 text-xs shadow-xl\",\n          className\n        )}\n      >\n        {!nestLabel ? tooltipLabel : null}\n        <div className=\"grid gap-1.5\">\n          {payload.map((item, index) => {\n            const key = `${nameKey || item.name || item.dataKey || \"value\"}`\n            const itemConfig = getPayloadConfigFromPayload(config, item, key)\n            const indicatorColor = color || item.payload.fill || item.color\n\n            return (\n              <div\n                key={item.dataKey}\n                className={cn(\n                  \"flex w-full flex-wrap items-stretch gap-2 [&>svg]:h-2.5 [&>svg]:w-2.5 [&>svg]:text-muted-foreground\",\n                  indicator === \"dot\" && \"items-center\"\n                )}\n              >\n                {formatter && item?.value !== undefined && item.name ? (\n                  formatter(item.value, item.name, item, index, item.payload)\n                ) : (\n                  <>\n                    {itemConfig?.icon ? 
(\n                      <itemConfig.icon />\n                    ) : (\n                      !hideIndicator && (\n                        <div\n                          className={cn(\n                            \"shrink-0 rounded-[2px] border-[--color-border] bg-[--color-bg]\",\n                            {\n                              \"h-2.5 w-2.5\": indicator === \"dot\",\n                              \"w-1\": indicator === \"line\",\n                              \"w-0 border-[1.5px] border-dashed bg-transparent\":\n                                indicator === \"dashed\",\n                              \"my-0.5\": nestLabel && indicator === \"dashed\",\n                            }\n                          )}\n                          style={\n                            {\n                              \"--color-bg\": indicatorColor,\n                              \"--color-border\": indicatorColor,\n                            } as React.CSSProperties\n                          }\n                        />\n                      )\n                    )}\n                    <div\n                      className={cn(\n                        \"flex flex-1 justify-between leading-none\",\n                        nestLabel ? \"items-end\" : \"items-center\"\n                      )}\n                    >\n                      <div className=\"grid gap-1.5\">\n                        {nestLabel ? 
tooltipLabel : null}\n                        <span className=\"text-muted-foreground\">\n                          {itemConfig?.label || item.name}\n                        </span>\n                      </div>\n                      {item.value && (\n                        <span className=\"font-mono font-medium tabular-nums text-foreground\">\n                          {item.value.toLocaleString()}\n                        </span>\n                      )}\n                    </div>\n                  </>\n                )}\n              </div>\n            )\n          })}\n        </div>\n      </div>\n    )\n  }\n)\nChartTooltipContent.displayName = \"ChartTooltip\"\n\nconst ChartLegend = RechartsPrimitive.Legend\n\nconst ChartLegendContent = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> &\n    Pick<RechartsPrimitive.LegendProps, \"payload\" | \"verticalAlign\"> & {\n      hideIcon?: boolean\n      nameKey?: string\n    }\n>(\n  (\n    { className, hideIcon = false, payload, verticalAlign = \"bottom\", nameKey },\n    ref\n  ) => {\n    const { config } = useChart()\n\n    if (!payload?.length) {\n      return null\n    }\n\n    return (\n      <div\n        ref={ref}\n        className={cn(\n          \"flex items-center justify-center gap-4\",\n          verticalAlign === \"top\" ? \"pb-3\" : \"pt-3\",\n          className\n        )}\n      >\n        {payload.map((item) => {\n          const key = `${nameKey || item.dataKey || \"value\"}`\n          const itemConfig = getPayloadConfigFromPayload(config, item, key)\n\n          return (\n            <div\n              key={item.value}\n              className={cn(\n                \"flex items-center gap-1.5 [&>svg]:h-3 [&>svg]:w-3 [&>svg]:text-muted-foreground\"\n              )}\n            >\n              {itemConfig?.icon && !hideIcon ? 
(\n                <itemConfig.icon />\n              ) : (\n                <div\n                  className=\"h-2 w-2 shrink-0 rounded-[2px]\"\n                  style={{\n                    backgroundColor: item.color,\n                  }}\n                />\n              )}\n              {itemConfig?.label}\n            </div>\n          )\n        })}\n      </div>\n    )\n  }\n)\nChartLegendContent.displayName = \"ChartLegend\"\n\n// Helper to extract item config from a payload.\nfunction getPayloadConfigFromPayload(\n  config: ChartConfig,\n  payload: unknown,\n  key: string\n) {\n  if (typeof payload !== \"object\" || payload === null) {\n    return undefined\n  }\n\n  const payloadPayload =\n    \"payload\" in payload &&\n    typeof payload.payload === \"object\" &&\n    payload.payload !== null\n      ? payload.payload\n      : undefined\n\n  let configLabelKey: string = key\n\n  if (\n    key in payload &&\n    typeof payload[key as keyof typeof payload] === \"string\"\n  ) {\n    configLabelKey = payload[key as keyof typeof payload] as string\n  } else if (\n    payloadPayload &&\n    key in payloadPayload &&\n    typeof payloadPayload[key as keyof typeof payloadPayload] === \"string\"\n  ) {\n    configLabelKey = payloadPayload[\n      key as keyof typeof payloadPayload\n    ] as string\n  }\n\n  return configLabelKey in config\n    ? config[configLabelKey]\n    : config[key as keyof typeof config]\n}\n\nexport {\n  ChartContainer,\n  ChartTooltip,\n  ChartTooltipContent,\n  ChartLegend,\n  ChartLegendContent,\n  ChartStyle,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/checkbox.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as CheckboxPrimitive from \"@radix-ui/react-checkbox\"\nimport { Check } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Checkbox = React.forwardRef<\n  React.ElementRef<typeof CheckboxPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof CheckboxPrimitive.Root>\n>(({ className, ...props }, ref) => (\n  <CheckboxPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"peer h-4 w-4 shrink-0 rounded-sm border border-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-primary data-[state=checked]:text-primary-foreground\",\n      className\n    )}\n    {...props}\n  >\n    <CheckboxPrimitive.Indicator\n      className={cn(\"flex items-center justify-center text-current\")}\n    >\n      <Check className=\"h-4 w-4\" />\n    </CheckboxPrimitive.Indicator>\n  </CheckboxPrimitive.Root>\n))\nCheckbox.displayName = CheckboxPrimitive.Root.displayName\n\nexport { Checkbox }\n"
  },
  {
    "path": "frontend/app/src/components/ui/collapsible.tsx",
    "content": "\"use client\"\n\nimport * as CollapsiblePrimitive from \"@radix-ui/react-collapsible\"\n\nconst Collapsible = CollapsiblePrimitive.Root\n\nconst CollapsibleTrigger = CollapsiblePrimitive.CollapsibleTrigger\n\nconst CollapsibleContent = CollapsiblePrimitive.CollapsibleContent\n\nexport { Collapsible, CollapsibleTrigger, CollapsibleContent }\n"
  },
  {
    "path": "frontend/app/src/components/ui/command.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport { type DialogProps } from \"@radix-ui/react-dialog\"\nimport { Command as CommandPrimitive } from \"cmdk\"\nimport { Search } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\nimport { Dialog, DialogContent } from \"@/components/ui/dialog\"\n\nconst Command = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive>\n>(({ className, ...props }, ref) => (\n  <CommandPrimitive\n    ref={ref}\n    className={cn(\n      \"flex h-full w-full flex-col overflow-hidden rounded-md bg-popover text-popover-foreground\",\n      className\n    )}\n    {...props}\n  />\n))\nCommand.displayName = CommandPrimitive.displayName\n\ninterface CommandDialogProps extends DialogProps {}\n\nconst CommandDialog = ({ children, ...props }: CommandDialogProps) => {\n  return (\n    <Dialog {...props}>\n      <DialogContent className=\"overflow-hidden p-0 shadow-lg\">\n        <Command className=\"[&_[cmdk-group-heading]]:px-2 [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group-heading]]:text-muted-foreground [&_[cmdk-group]:not([hidden])_~[cmdk-group]]:pt-0 [&_[cmdk-group]]:px-2 [&_[cmdk-input-wrapper]_svg]:h-5 [&_[cmdk-input-wrapper]_svg]:w-5 [&_[cmdk-input]]:h-12 [&_[cmdk-item]]:px-2 [&_[cmdk-item]]:py-3 [&_[cmdk-item]_svg]:h-5 [&_[cmdk-item]_svg]:w-5\">\n          {children}\n        </Command>\n      </DialogContent>\n    </Dialog>\n  )\n}\n\nconst CommandInput = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.Input>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.Input>\n>(({ className, ...props }, ref) => (\n  <div className=\"flex items-center border-b px-3\" cmdk-input-wrapper=\"\">\n    <Search className=\"mr-2 h-4 w-4 shrink-0 opacity-50\" />\n    <CommandPrimitive.Input\n      ref={ref}\n      className={cn(\n        \"flex h-11 w-full rounded-md bg-transparent py-3 text-sm outline-none 
placeholder:text-muted-foreground disabled:cursor-not-allowed disabled:opacity-50\",\n        className\n      )}\n      {...props}\n    />\n  </div>\n))\n\nCommandInput.displayName = CommandPrimitive.Input.displayName\n\nconst CommandList = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.List>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.List>\n>(({ className, ...props }, ref) => (\n  <CommandPrimitive.List\n    ref={ref}\n    className={cn(\"max-h-[300px] overflow-y-auto overflow-x-hidden\", className)}\n    {...props}\n  />\n))\n\nCommandList.displayName = CommandPrimitive.List.displayName\n\nconst CommandEmpty = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.Empty>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.Empty>\n>((props, ref) => (\n  <CommandPrimitive.Empty\n    ref={ref}\n    className=\"py-6 text-center text-sm\"\n    {...props}\n  />\n))\n\nCommandEmpty.displayName = CommandPrimitive.Empty.displayName\n\nconst CommandGroup = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.Group>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.Group>\n>(({ className, ...props }, ref) => (\n  <CommandPrimitive.Group\n    ref={ref}\n    className={cn(\n      \"overflow-hidden p-1 text-foreground [&_[cmdk-group-heading]]:px-2 [&_[cmdk-group-heading]]:py-1.5 [&_[cmdk-group-heading]]:text-xs [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group-heading]]:text-muted-foreground\",\n      className\n    )}\n    {...props}\n  />\n))\n\nCommandGroup.displayName = CommandPrimitive.Group.displayName\n\nconst CommandSeparator = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.Separator>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.Separator>\n>(({ className, ...props }, ref) => (\n  <CommandPrimitive.Separator\n    ref={ref}\n    className={cn(\"-mx-1 h-px bg-border\", className)}\n    {...props}\n  />\n))\nCommandSeparator.displayName = 
CommandPrimitive.Separator.displayName\n\nconst CommandItem = React.forwardRef<\n  React.ElementRef<typeof CommandPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof CommandPrimitive.Item>\n>(({ className, ...props }, ref) => (\n  <CommandPrimitive.Item\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none data-[disabled=true]:pointer-events-none data-[selected='true']:bg-accent data-[selected=true]:text-accent-foreground data-[disabled=true]:opacity-50\",\n      className\n    )}\n    {...props}\n  />\n))\n\nCommandItem.displayName = CommandPrimitive.Item.displayName\n\nconst CommandShortcut = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLSpanElement>) => {\n  return (\n    <span\n      className={cn(\n        \"ml-auto text-xs tracking-widest text-muted-foreground\",\n        className\n      )}\n      {...props}\n    />\n  )\n}\nCommandShortcut.displayName = \"CommandShortcut\"\n\nexport {\n  Command,\n  CommandDialog,\n  CommandInput,\n  CommandList,\n  CommandEmpty,\n  CommandGroup,\n  CommandItem,\n  CommandShortcut,\n  CommandSeparator,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/context-menu.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as ContextMenuPrimitive from \"@radix-ui/react-context-menu\"\nimport { Check, ChevronRight, Circle } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst ContextMenu = ContextMenuPrimitive.Root\n\nconst ContextMenuTrigger = ContextMenuPrimitive.Trigger\n\nconst ContextMenuGroup = ContextMenuPrimitive.Group\n\nconst ContextMenuPortal = ContextMenuPrimitive.Portal\n\nconst ContextMenuSub = ContextMenuPrimitive.Sub\n\nconst ContextMenuRadioGroup = ContextMenuPrimitive.RadioGroup\n\nconst ContextMenuSubTrigger = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.SubTrigger>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.SubTrigger> & {\n    inset?: boolean\n  }\n>(({ className, inset, children, ...props }, ref) => (\n  <ContextMenuPrimitive.SubTrigger\n    ref={ref}\n    className={cn(\n      \"flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[state=open]:bg-accent data-[state=open]:text-accent-foreground\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  >\n    {children}\n    <ChevronRight className=\"ml-auto h-4 w-4\" />\n  </ContextMenuPrimitive.SubTrigger>\n))\nContextMenuSubTrigger.displayName = ContextMenuPrimitive.SubTrigger.displayName\n\nconst ContextMenuSubContent = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.SubContent>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.SubContent>\n>(({ className, ...props }, ref) => (\n  <ContextMenuPrimitive.SubContent\n    ref={ref}\n    className={cn(\n      \"z-50 min-w-[8rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 
data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n      className\n    )}\n    {...props}\n  />\n))\nContextMenuSubContent.displayName = ContextMenuPrimitive.SubContent.displayName\n\nconst ContextMenuContent = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.Content>\n>(({ className, ...props }, ref) => (\n  <ContextMenuPrimitive.Portal>\n    <ContextMenuPrimitive.Content\n      ref={ref}\n      className={cn(\n        \"z-50 min-w-[8rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-md animate-in fade-in-80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n        className\n      )}\n      {...props}\n    />\n  </ContextMenuPrimitive.Portal>\n))\nContextMenuContent.displayName = ContextMenuPrimitive.Content.displayName\n\nconst ContextMenuItem = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.Item> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <ContextMenuPrimitive.Item\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nContextMenuItem.displayName = ContextMenuPrimitive.Item.displayName\n\nconst ContextMenuCheckboxItem = React.forwardRef<\n  
React.ElementRef<typeof ContextMenuPrimitive.CheckboxItem>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.CheckboxItem>\n>(({ className, children, checked, ...props }, ref) => (\n  <ContextMenuPrimitive.CheckboxItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    checked={checked}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <ContextMenuPrimitive.ItemIndicator>\n        <Check className=\"h-4 w-4\" />\n      </ContextMenuPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </ContextMenuPrimitive.CheckboxItem>\n))\nContextMenuCheckboxItem.displayName =\n  ContextMenuPrimitive.CheckboxItem.displayName\n\nconst ContextMenuRadioItem = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.RadioItem>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.RadioItem>\n>(({ className, children, ...props }, ref) => (\n  <ContextMenuPrimitive.RadioItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <ContextMenuPrimitive.ItemIndicator>\n        <Circle className=\"h-2 w-2 fill-current\" />\n      </ContextMenuPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </ContextMenuPrimitive.RadioItem>\n))\nContextMenuRadioItem.displayName = ContextMenuPrimitive.RadioItem.displayName\n\nconst ContextMenuLabel = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.Label>,\n  
React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.Label> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <ContextMenuPrimitive.Label\n    ref={ref}\n    className={cn(\n      \"px-2 py-1.5 text-sm font-semibold text-foreground\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nContextMenuLabel.displayName = ContextMenuPrimitive.Label.displayName\n\nconst ContextMenuSeparator = React.forwardRef<\n  React.ElementRef<typeof ContextMenuPrimitive.Separator>,\n  React.ComponentPropsWithoutRef<typeof ContextMenuPrimitive.Separator>\n>(({ className, ...props }, ref) => (\n  <ContextMenuPrimitive.Separator\n    ref={ref}\n    className={cn(\"-mx-1 my-1 h-px bg-border\", className)}\n    {...props}\n  />\n))\nContextMenuSeparator.displayName = ContextMenuPrimitive.Separator.displayName\n\nconst ContextMenuShortcut = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLSpanElement>) => {\n  return (\n    <span\n      className={cn(\n        \"ml-auto text-xs tracking-widest text-muted-foreground\",\n        className\n      )}\n      {...props}\n    />\n  )\n}\nContextMenuShortcut.displayName = \"ContextMenuShortcut\"\n\nexport {\n  ContextMenu,\n  ContextMenuTrigger,\n  ContextMenuContent,\n  ContextMenuItem,\n  ContextMenuCheckboxItem,\n  ContextMenuRadioItem,\n  ContextMenuLabel,\n  ContextMenuSeparator,\n  ContextMenuShortcut,\n  ContextMenuGroup,\n  ContextMenuPortal,\n  ContextMenuSub,\n  ContextMenuSubContent,\n  ContextMenuSubTrigger,\n  ContextMenuRadioGroup,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/dialog.tsx",
    "content": "\"use client\"\n\nimport { usePortalContainer } from '@/components/portal-provider';\nimport * as React from \"react\"\nimport * as DialogPrimitive from \"@radix-ui/react-dialog\"\nimport { X } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Dialog = DialogPrimitive.Root\n\nconst DialogTrigger = DialogPrimitive.Trigger\n\nconst DialogPortal = DialogPrimitive.Portal\n\nconst DialogClose = DialogPrimitive.Close\n\nconst DialogOverlay = React.forwardRef<\n  React.ElementRef<typeof DialogPrimitive.Overlay>,\n  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Overlay>\n>(({ className, ...props }, ref) => (\n  <DialogPrimitive.Overlay\n    ref={ref}\n    className={cn(\n      \"fixed inset-0 z-50 bg-black/80  data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0\",\n      className\n    )}\n    {...props}\n  />\n))\nDialogOverlay.displayName = DialogPrimitive.Overlay.displayName\n\nconst DialogContent = React.forwardRef<\n  React.ElementRef<typeof DialogPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Content> & { /** @deprecated */container?: HTMLElement }\n>(({ className, container, children, ...props }, ref) => (\n  <DialogPortal container={usePortalContainer()}>\n    <DialogOverlay />\n    <DialogPrimitive.Content\n      ref={ref}\n      className={cn(\n        \"fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border bg-background p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg\",\n        className\n      )}\n      {...props}\n    >\n      
{children}\n      <DialogPrimitive.Close className=\"absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-accent data-[state=open]:text-muted-foreground\">\n        <X className=\"h-4 w-4\" />\n        <span className=\"sr-only\">Close</span>\n      </DialogPrimitive.Close>\n    </DialogPrimitive.Content>\n  </DialogPortal>\n))\nDialogContent.displayName = DialogPrimitive.Content.displayName\n\nconst DialogHeader = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col space-y-1.5 text-center sm:text-left\",\n      className\n    )}\n    {...props}\n  />\n)\nDialogHeader.displayName = \"DialogHeader\"\n\nconst DialogFooter = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2\",\n      className\n    )}\n    {...props}\n  />\n)\nDialogFooter.displayName = \"DialogFooter\"\n\nconst DialogTitle = React.forwardRef<\n  React.ElementRef<typeof DialogPrimitive.Title>,\n  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Title>\n>(({ className, ...props }, ref) => (\n  <DialogPrimitive.Title\n    ref={ref}\n    className={cn(\n      \"text-lg font-semibold leading-none tracking-tight\",\n      className\n    )}\n    {...props}\n  />\n))\nDialogTitle.displayName = DialogPrimitive.Title.displayName\n\nconst DialogDescription = React.forwardRef<\n  React.ElementRef<typeof DialogPrimitive.Description>,\n  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Description>\n>(({ className, ...props }, ref) => (\n  <DialogPrimitive.Description\n    ref={ref}\n    className={cn(\"text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nDialogDescription.displayName = 
DialogPrimitive.Description.displayName\n\nexport {\n  Dialog,\n  DialogPortal,\n  DialogOverlay,\n  DialogClose,\n  DialogTrigger,\n  DialogContent,\n  DialogHeader,\n  DialogFooter,\n  DialogTitle,\n  DialogDescription,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/dot-pattern.tsx",
    "content": "import { useId } from \"react\";\n\nimport { cn } from \"@/lib/utils\";\n\ninterface DotPatternProps {\n  width?: any;\n  height?: any;\n  x?: any;\n  y?: any;\n  cx?: any;\n  cy?: any;\n  cr?: any;\n  className?: string;\n  [key: string]: any;\n}\nexport function DotPattern({\n  width = 16,\n  height = 16,\n  x = 0,\n  y = 0,\n  cx = 1,\n  cy = 1,\n  cr = 1,\n  className,\n  ...props\n}: DotPatternProps) {\n  const id = useId();\n\n  return (\n    <svg\n      aria-hidden=\"true\"\n      className={cn(\n        \"pointer-events-none absolute inset-0 h-full w-full fill-neutral-400/80\",\n        className,\n      )}\n      {...props}\n    >\n      <defs>\n        <pattern\n          id={id}\n          width={width}\n          height={height}\n          patternUnits=\"userSpaceOnUse\"\n          patternContentUnits=\"userSpaceOnUse\"\n          x={x}\n          y={y}\n        >\n          <circle id=\"pattern-circle\" cx={cx} cy={cy} r={cr} />\n        </pattern>\n      </defs>\n      <rect width=\"100%\" height=\"100%\" strokeWidth={0} fill={`url(#${id})`} />\n    </svg>\n  );\n}\n\nexport default DotPattern;\n"
  },
  {
    "path": "frontend/app/src/components/ui/drawer.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport { Drawer as DrawerPrimitive } from \"vaul\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Drawer = ({\n  shouldScaleBackground = true,\n  ...props\n}: React.ComponentProps<typeof DrawerPrimitive.Root>) => (\n  <DrawerPrimitive.Root\n    shouldScaleBackground={shouldScaleBackground}\n    {...props}\n  />\n)\nDrawer.displayName = \"Drawer\"\n\nconst DrawerTrigger = DrawerPrimitive.Trigger\n\nconst DrawerPortal = DrawerPrimitive.Portal\n\nconst DrawerClose = DrawerPrimitive.Close\n\nconst DrawerOverlay = React.forwardRef<\n  React.ElementRef<typeof DrawerPrimitive.Overlay>,\n  React.ComponentPropsWithoutRef<typeof DrawerPrimitive.Overlay>\n>(({ className, ...props }, ref) => (\n  <DrawerPrimitive.Overlay\n    ref={ref}\n    className={cn(\"fixed inset-0 z-50 bg-black/80\", className)}\n    {...props}\n  />\n))\nDrawerOverlay.displayName = DrawerPrimitive.Overlay.displayName\n\nconst DrawerContent = React.forwardRef<\n  React.ElementRef<typeof DrawerPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof DrawerPrimitive.Content>\n>(({ className, children, ...props }, ref) => (\n  <DrawerPortal>\n    <DrawerOverlay />\n    <DrawerPrimitive.Content\n      ref={ref}\n      className={cn(\n        \"fixed inset-x-0 bottom-0 z-50 mt-24 flex h-auto flex-col rounded-t-[10px] border bg-background\",\n        className\n      )}\n      {...props}\n    >\n      <div className=\"mx-auto mt-4 h-2 w-[100px] rounded-full bg-muted\" />\n      {children}\n    </DrawerPrimitive.Content>\n  </DrawerPortal>\n))\nDrawerContent.displayName = \"DrawerContent\"\n\nconst DrawerHeader = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\"grid gap-1.5 p-4 text-center sm:text-left\", className)}\n    {...props}\n  />\n)\nDrawerHeader.displayName = \"DrawerHeader\"\n\nconst DrawerFooter = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n 
 <div\n    className={cn(\"mt-auto flex flex-col gap-2 p-4\", className)}\n    {...props}\n  />\n)\nDrawerFooter.displayName = \"DrawerFooter\"\n\nconst DrawerTitle = React.forwardRef<\n  React.ElementRef<typeof DrawerPrimitive.Title>,\n  React.ComponentPropsWithoutRef<typeof DrawerPrimitive.Title>\n>(({ className, ...props }, ref) => (\n  <DrawerPrimitive.Title\n    ref={ref}\n    className={cn(\n      \"text-lg font-semibold leading-none tracking-tight\",\n      className\n    )}\n    {...props}\n  />\n))\nDrawerTitle.displayName = DrawerPrimitive.Title.displayName\n\nconst DrawerDescription = React.forwardRef<\n  React.ElementRef<typeof DrawerPrimitive.Description>,\n  React.ComponentPropsWithoutRef<typeof DrawerPrimitive.Description>\n>(({ className, ...props }, ref) => (\n  <DrawerPrimitive.Description\n    ref={ref}\n    className={cn(\"text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nDrawerDescription.displayName = DrawerPrimitive.Description.displayName\n\nexport {\n  Drawer,\n  DrawerPortal,\n  DrawerOverlay,\n  DrawerTrigger,\n  DrawerClose,\n  DrawerContent,\n  DrawerHeader,\n  DrawerFooter,\n  DrawerTitle,\n  DrawerDescription,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/dropdown-menu.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as DropdownMenuPrimitive from \"@radix-ui/react-dropdown-menu\"\nimport { Check, ChevronRight, Circle } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst DropdownMenu = DropdownMenuPrimitive.Root\n\nconst DropdownMenuTrigger = DropdownMenuPrimitive.Trigger\n\nconst DropdownMenuGroup = DropdownMenuPrimitive.Group\n\nconst DropdownMenuPortal = DropdownMenuPrimitive.Portal\n\nconst DropdownMenuSub = DropdownMenuPrimitive.Sub\n\nconst DropdownMenuRadioGroup = DropdownMenuPrimitive.RadioGroup\n\nconst DropdownMenuSubTrigger = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.SubTrigger>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.SubTrigger> & {\n    inset?: boolean\n  }\n>(({ className, inset, children, ...props }, ref) => (\n  <DropdownMenuPrimitive.SubTrigger\n    ref={ref}\n    className={cn(\n      \"flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none focus:bg-accent data-[state=open]:bg-accent\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  >\n    {children}\n    <ChevronRight className=\"ml-auto h-4 w-4\" />\n  </DropdownMenuPrimitive.SubTrigger>\n))\nDropdownMenuSubTrigger.displayName =\n  DropdownMenuPrimitive.SubTrigger.displayName\n\nconst DropdownMenuSubContent = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.SubContent>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.SubContent>\n>(({ className, ...props }, ref) => (\n  <DropdownMenuPrimitive.SubContent\n    ref={ref}\n    className={cn(\n      \"z-50 min-w-[8rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-lg data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 
data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n      className\n    )}\n    {...props}\n  />\n))\nDropdownMenuSubContent.displayName =\n  DropdownMenuPrimitive.SubContent.displayName\n\nconst DropdownMenuContent = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.Content>\n>(({ className, sideOffset = 4, ...props }, ref) => (\n  <DropdownMenuPrimitive.Portal>\n    <DropdownMenuPrimitive.Content\n      ref={ref}\n      sideOffset={sideOffset}\n      className={cn(\n        \"z-50 min-w-[8rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n        className\n      )}\n      {...props}\n    />\n  </DropdownMenuPrimitive.Portal>\n))\nDropdownMenuContent.displayName = DropdownMenuPrimitive.Content.displayName\n\nconst DropdownMenuItem = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.Item> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <DropdownMenuPrimitive.Item\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none transition-colors focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nDropdownMenuItem.displayName = DropdownMenuPrimitive.Item.displayName\n\nconst DropdownMenuCheckboxItem = 
React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.CheckboxItem>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.CheckboxItem>\n>(({ className, children, checked, ...props }, ref) => (\n  <DropdownMenuPrimitive.CheckboxItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none transition-colors focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    checked={checked}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <DropdownMenuPrimitive.ItemIndicator>\n        <Check className=\"h-4 w-4\" />\n      </DropdownMenuPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </DropdownMenuPrimitive.CheckboxItem>\n))\nDropdownMenuCheckboxItem.displayName =\n  DropdownMenuPrimitive.CheckboxItem.displayName\n\nconst DropdownMenuRadioItem = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.RadioItem>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.RadioItem>\n>(({ className, children, ...props }, ref) => (\n  <DropdownMenuPrimitive.RadioItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none transition-colors focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <DropdownMenuPrimitive.ItemIndicator>\n        <Circle className=\"h-2 w-2 fill-current\" />\n      </DropdownMenuPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </DropdownMenuPrimitive.RadioItem>\n))\nDropdownMenuRadioItem.displayName = DropdownMenuPrimitive.RadioItem.displayName\n\nconst DropdownMenuLabel = 
React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.Label>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.Label> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <DropdownMenuPrimitive.Label\n    ref={ref}\n    className={cn(\n      \"px-2 py-1.5 text-sm font-semibold\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nDropdownMenuLabel.displayName = DropdownMenuPrimitive.Label.displayName\n\nconst DropdownMenuSeparator = React.forwardRef<\n  React.ElementRef<typeof DropdownMenuPrimitive.Separator>,\n  React.ComponentPropsWithoutRef<typeof DropdownMenuPrimitive.Separator>\n>(({ className, ...props }, ref) => (\n  <DropdownMenuPrimitive.Separator\n    ref={ref}\n    className={cn(\"-mx-1 my-1 h-px bg-muted\", className)}\n    {...props}\n  />\n))\nDropdownMenuSeparator.displayName = DropdownMenuPrimitive.Separator.displayName\n\nconst DropdownMenuShortcut = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLSpanElement>) => {\n  return (\n    <span\n      className={cn(\"ml-auto text-xs tracking-widest opacity-60\", className)}\n      {...props}\n    />\n  )\n}\nDropdownMenuShortcut.displayName = \"DropdownMenuShortcut\"\n\nexport {\n  DropdownMenu,\n  DropdownMenuTrigger,\n  DropdownMenuContent,\n  DropdownMenuItem,\n  DropdownMenuCheckboxItem,\n  DropdownMenuRadioItem,\n  DropdownMenuLabel,\n  DropdownMenuSeparator,\n  DropdownMenuShortcut,\n  DropdownMenuGroup,\n  DropdownMenuPortal,\n  DropdownMenuSub,\n  DropdownMenuSubContent,\n  DropdownMenuSubTrigger,\n  DropdownMenuRadioGroup,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/form.beta.tsx",
    "content": "// This file contains new form components based on @tanstack/form\n// The components should be aligned with original form components.\n\nimport { useRegisterFieldInFormSection } from '@/components/form-sections';\nimport { Button } from '@/components/ui/button';\nimport { Label } from '@/components/ui/label';\nimport { cn } from '@/lib/utils';\nimport { Slot } from '@radix-ui/react-slot';\nimport type { FieldValidators } from '@tanstack/react-form';\nimport * as FormPrimitives from '@tanstack/react-form';\nimport { type DeepValue, type FieldApi, type FormApi, type ReactFormExtendedApi, useField } from '@tanstack/react-form';\nimport { Loader2Icon } from 'lucide-react';\nimport * as React from 'react';\nimport { type ComponentProps, createContext, type FormEvent, type ReactNode, useContext, useId } from 'react';\n\nconst FormContext = createContext<{\n  form: FormPrimitives.ReactFormExtendedApi<any, any>\n  disabled?: boolean\n  submissionError?: unknown;\n} | undefined\n>(undefined);\n\nfunction useFormContext<\n  TFormData,\n  TFormValidator extends FormPrimitives.Validator<TFormData, unknown> | undefined = undefined,\n> () {\n  const api = useContext(FormContext);\n  if (!api) {\n    throw new Error('Require tanstack form context');\n  }\n  return {\n    ...api,\n    form: api.form as FormPrimitives.ReactFormExtendedApi<TFormData, TFormValidator>,\n  };\n}\n\nfunction formDomEventHandlers (form: FormApi<any>, disabled?: boolean): Pick<ComponentProps<'form'>, 'onSubmit' | 'onReset'> {\n  return {\n    onSubmit: (event: FormEvent<HTMLFormElement>) => {\n      event.preventDefault();\n      event.stopPropagation();\n      if (!disabled) {\n        void form.handleSubmit();\n      }\n    },\n    onReset: (event: FormEvent<HTMLFormElement>) => {\n      event.preventDefault();\n      event.stopPropagation();\n      if (!disabled) {\n        void form.reset();\n      }\n    },\n  };\n}\n\nconst FormFieldContext = createContext<{ name: any, mode?: 'value' 
| 'array' | undefined } | undefined>(undefined);\nconst FormItemContext = createContext<{ id: string } | undefined>(undefined);\n\nfunction useFormField<\n  TFormData,\n  TName extends FormPrimitives.DeepKeys<TFormData>,\n  TFormValidator extends FormPrimitives.Validator<TFormData, unknown> | undefined = undefined,\n> () {\n  const { form } = useFormContext<TFormData, TFormValidator>();\n  const fieldContext = useContext(FormFieldContext);\n  const itemContext = useContext(FormItemContext);\n\n  if (!fieldContext) {\n    throw new Error('useFormField() should be used within <FormField>');\n  }\n\n  const field = form.getFieldMeta(fieldContext.name as TName);\n\n  const id = itemContext?.id;\n\n  const idProps = id ? {\n    id: id,\n    formItemId: `${id}-form-item`,\n    formDescriptionId: `${id}-form-item-description`,\n    formMessageId: `${id}-form-item-message`,\n  } : {\n    id: undefined,\n    formItemId: undefined,\n    formDescriptionId: undefined,\n    formMessageId: undefined,\n  };\n\n  return {\n    name: fieldContext.name as TName,\n    field,\n    ...idProps,\n  };\n}\n\ninterface FormProps<\n  TFormData,\n  TFormValidator extends FormPrimitives.Validator<TFormData, unknown> | undefined = undefined,\n> {\n  form: FormPrimitives.ReactFormExtendedApi<TFormData, TFormValidator>;\n  disabled?: boolean;\n  submissionError?: unknown;\n  children: ReactNode;\n}\n\nfunction Form<\n  TFormData,\n  TFormValidator extends FormPrimitives.Validator<TFormData, unknown> | undefined = undefined,\n> ({ children, disabled, submissionError, form }: FormProps<TFormData, TFormValidator>) {\n  return (\n    <FormContext value={{ form, submissionError, disabled }}>\n      {children}\n    </FormContext>\n  );\n}\n\nfunction FormField<\n  TFormData = any,\n  TName extends FormPrimitives.DeepKeys<TFormData> = any,\n  TFieldValidator extends FormPrimitives.Validator<DeepValue<TFormData, TName>, unknown> | undefined = undefined,\n  TFormValidator extends 
FormPrimitives.Validator<TFormData, unknown> | undefined = undefined,\n> ({ name, defaultValue, validators, mode, render }: {\n  name: TName\n  defaultValue?: DeepValue<TFormData, TName>\n  mode?: 'value' | 'array' | undefined\n  validators?: FieldValidators<TFormData, TName, TFieldValidator, TFormValidator>;\n  render: (\n    field: FieldApi<TFormData, TName, TFieldValidator, TFormValidator, FormPrimitives.DeepValue<TFormData, TName>>,\n    form: ReactFormExtendedApi<TFormData, TFormValidator>,\n    disabled: boolean | undefined,\n  ) => ReactNode\n}) {\n  const { form, disabled } = useFormContext<TFormData, TFormValidator>();\n\n  const field = useField<TFormData, TName, TFieldValidator, TFormValidator, DeepValue<TFormData, TName>>({\n    form,\n    name,\n    mode,\n    defaultValue: defaultValue as never /** type issue */,\n    validators,\n  });\n\n  useRegisterFieldInFormSection(field);\n\n  return (\n    <FormFieldContext value={{ name, mode }}>\n      {render(field, form, disabled)}\n    </FormFieldContext>\n  );\n}\n\nfunction FormItem ({ className, ref, ...props }: ComponentProps<'div'>) {\n  const _id = useId();\n  const id = props.id ?? 
_id;\n  return (\n    <FormItemContext.Provider value={{ id }}>\n      <div ref={ref} className={cn('space-y-2', className)} {...props} />\n    </FormItemContext.Provider>\n  );\n}\n\nfunction FormLabel ({ ref, className, ...props }: ComponentProps<typeof Label>) {\n  const { field, formItemId } = useFormField();\n  const error = !!field?.errors?.length;\n\n  return (\n    <Label\n      ref={ref}\n      className={cn(error && 'text-destructive', className)}\n      htmlFor={formItemId}\n      {...props}\n    />\n  );\n}\n\nfunction FormControl ({ ref, ...props }: ComponentProps<typeof Slot>) {\n  const { field, formItemId, formDescriptionId, formMessageId } = useFormField();\n  const error = field?.errors?.[0];\n\n  return (\n    <Slot\n      ref={ref}\n      id={formItemId}\n      aria-describedby={\n        !error\n          ? `${formDescriptionId}`\n          : `${formDescriptionId} ${formMessageId}`\n      }\n      aria-invalid={!!error}\n      {...props}\n    />\n  );\n}\n\nfunction FormDescription ({ ref, className, ...props }: ComponentProps<'p'>) {\n  const { formDescriptionId } = useFormField();\n\n  return (\n    <p\n      ref={ref}\n      id={formDescriptionId}\n      className={cn('text-sm text-muted-foreground', className)}\n      {...props}\n    />\n  );\n}\n\nfunction FormMessage ({ ref, className, children, ...props }: ComponentProps<'p'>) {\n  const { field, formMessageId } = useFormField();\n  const error = field?.errors?.[0];\n  const body = error ? 
String(error) : children;\n\n  if (!body) {\n    return null;\n  }\n\n  return (\n    <p\n      ref={ref}\n      id={formMessageId}\n      className={cn('text-sm font-medium text-destructive', className)}\n      {...props}\n    >\n      {body}\n    </p>\n  );\n}\n\nfunction FormSubmit ({\n  children,\n  submittingChildren,\n  asChild,\n  disabled,\n  transitioning,\n  ...props\n}: Omit<ComponentProps<typeof Button>, 'formAction' | 'type'> & {\n  /*\n   * Used to start a transition after an entity is created. The loader indicator will be shown while transitioning.\n   */\n  transitioning?: boolean\n  submittingChildren?: ReactNode;\n}) {\n  const { form } = useFormContext();\n\n  return (\n    <Button {...props} type=\"submit\" disabled={form.state.isSubmitting || transitioning || disabled}>\n      {asChild\n        ? children\n        : (form.state.isSubmitting || transitioning)\n          ? <>\n            <Loader2Icon className=\"animate-spin repeat-infinite\" />\n            {submittingChildren ?? children}\n          </>\n          : children}\n    </Button>\n  );\n}\n\nexport { useFormContext, Form, FormField, FormItem, FormLabel, FormControl, FormMessage, FormDescription, FormSubmit, formDomEventHandlers };\n"
  },
  {
    "path": "frontend/app/src/components/ui/form.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as LabelPrimitive from \"@radix-ui/react-label\"\nimport { Slot } from \"@radix-ui/react-slot\"\nimport {\n  Controller,\n  ControllerProps,\n  FieldPath,\n  FieldValues,\n  FormProvider,\n  useFormContext,\n} from \"react-hook-form\"\n\nimport { cn } from \"@/lib/utils\"\nimport { Label } from \"@/components/ui/label\"\n\n/**\n * @deprecated\n */\nconst Form = FormProvider\n\ntype FormFieldContextValue<\n  TFieldValues extends FieldValues = FieldValues,\n  TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>\n> = {\n  name: TName\n}\n\nconst FormFieldContext = React.createContext<FormFieldContextValue>(\n  {} as FormFieldContextValue\n)\n\n/**\n * @deprecated\n */\nconst FormField = <\n  TFieldValues extends FieldValues = FieldValues,\n  TName extends FieldPath<TFieldValues> = FieldPath<TFieldValues>\n>({\n  ...props\n}: ControllerProps<TFieldValues, TName>) => {\n  return (\n    <FormFieldContext.Provider value={{ name: props.name }}>\n      <Controller {...props} />\n    </FormFieldContext.Provider>\n  )\n}\n\n/**\n * @deprecated\n */\nconst useFormField = () => {\n  const fieldContext = React.useContext(FormFieldContext)\n  const itemContext = React.useContext(FormItemContext)\n  const { getFieldState, formState } = useFormContext()\n\n  const fieldState = getFieldState(fieldContext.name, formState)\n\n  if (!fieldContext) {\n    throw new Error(\"useFormField should be used within <FormField>\")\n  }\n\n  const { id } = itemContext\n\n  return {\n    id,\n    name: fieldContext.name,\n    formItemId: `${id}-form-item`,\n    formDescriptionId: `${id}-form-item-description`,\n    formMessageId: `${id}-form-item-message`,\n    ...fieldState,\n  }\n}\n\ntype FormItemContextValue = {\n  id: string\n}\n\nconst FormItemContext = React.createContext<FormItemContextValue>(\n  {} as FormItemContextValue\n)\n\n/**\n * @deprecated\n */\nconst FormItem = React.forwardRef<\n  
HTMLDivElement,\n  React.HTMLAttributes<HTMLDivElement>\n>(({ className, ...props }, ref) => {\n  const id = React.useId()\n\n  return (\n    <FormItemContext.Provider value={{ id }}>\n      <div ref={ref} className={cn(\"space-y-2\", className)} {...props} />\n    </FormItemContext.Provider>\n  )\n})\nFormItem.displayName = \"FormItem\"\n\n/**\n * @deprecated\n */\nconst FormLabel = React.forwardRef<\n  React.ElementRef<typeof LabelPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof LabelPrimitive.Root>\n>(({ className, ...props }, ref) => {\n  const { error, formItemId } = useFormField()\n\n  return (\n    <Label\n      ref={ref}\n      className={cn(error && \"text-destructive\", className)}\n      htmlFor={formItemId}\n      {...props}\n    />\n  )\n})\nFormLabel.displayName = \"FormLabel\"\n\n/**\n * @deprecated\n */\nconst FormControl = React.forwardRef<\n  React.ElementRef<typeof Slot>,\n  React.ComponentPropsWithoutRef<typeof Slot>\n>(({ ...props }, ref) => {\n  const { error, formItemId, formDescriptionId, formMessageId } = useFormField()\n\n  return (\n    <Slot\n      ref={ref}\n      id={formItemId}\n      aria-describedby={\n        !error\n          ? 
`${formDescriptionId}`\n          : `${formDescriptionId} ${formMessageId}`\n      }\n      aria-invalid={!!error}\n      {...props}\n    />\n  )\n})\nFormControl.displayName = \"FormControl\"\n\n/**\n * @deprecated\n */\nconst FormDescription = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLParagraphElement>\n>(({ className, ...props }, ref) => {\n  const { formDescriptionId } = useFormField()\n\n  return (\n    <p\n      ref={ref}\n      id={formDescriptionId}\n      className={cn(\"text-sm text-muted-foreground\", className)}\n      {...props}\n    />\n  )\n})\nFormDescription.displayName = \"FormDescription\"\n\n/**\n * @deprecated\n */\nconst FormMessage = React.forwardRef<\n  HTMLParagraphElement,\n  React.HTMLAttributes<HTMLParagraphElement>\n>(({ className, children, ...props }, ref) => {\n  const { error, formMessageId } = useFormField()\n  const body = error?.message ? String(error?.message) : children\n\n  if (!body) {\n    return null\n  }\n\n  return (\n    <p\n      ref={ref}\n      id={formMessageId}\n      className={cn(\"text-sm font-medium text-destructive\", className)}\n      {...props}\n    >\n      {body}\n    </p>\n  )\n})\nFormMessage.displayName = \"FormMessage\"\n\nexport {\n  useFormField,\n  Form,\n  FormItem,\n  FormLabel,\n  FormControl,\n  FormDescription,\n  FormMessage,\n  FormField,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/hover-card.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as HoverCardPrimitive from \"@radix-ui/react-hover-card\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst HoverCard = HoverCardPrimitive.Root\n\nconst HoverCardTrigger = HoverCardPrimitive.Trigger\n\nconst HoverCardContent = React.forwardRef<\n  React.ElementRef<typeof HoverCardPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof HoverCardPrimitive.Content>\n>(({ className, align = \"center\", sideOffset = 4, ...props }, ref) => (\n  <HoverCardPrimitive.Content\n    ref={ref}\n    align={align}\n    sideOffset={sideOffset}\n    className={cn(\n      \"z-50 w-64 rounded-md border bg-popover p-4 text-popover-foreground shadow-md outline-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n      className\n    )}\n    {...props}\n  />\n))\nHoverCardContent.displayName = HoverCardPrimitive.Content.displayName\n\nexport { HoverCard, HoverCardTrigger, HoverCardContent }\n"
  },
  {
    "path": "frontend/app/src/components/ui/input-otp.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport { OTPInput, OTPInputContext } from \"input-otp\"\nimport { Dot } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst InputOTP = React.forwardRef<\n  React.ElementRef<typeof OTPInput>,\n  React.ComponentPropsWithoutRef<typeof OTPInput>\n>(({ className, containerClassName, ...props }, ref) => (\n  <OTPInput\n    ref={ref}\n    containerClassName={cn(\n      \"flex items-center gap-2 has-[:disabled]:opacity-50\",\n      containerClassName\n    )}\n    className={cn(\"disabled:cursor-not-allowed\", className)}\n    {...props}\n  />\n))\nInputOTP.displayName = \"InputOTP\"\n\nconst InputOTPGroup = React.forwardRef<\n  React.ElementRef<\"div\">,\n  React.ComponentPropsWithoutRef<\"div\">\n>(({ className, ...props }, ref) => (\n  <div ref={ref} className={cn(\"flex items-center\", className)} {...props} />\n))\nInputOTPGroup.displayName = \"InputOTPGroup\"\n\nconst InputOTPSlot = React.forwardRef<\n  React.ElementRef<\"div\">,\n  React.ComponentPropsWithoutRef<\"div\"> & { index: number }\n>(({ index, className, ...props }, ref) => {\n  const inputOTPContext = React.useContext(OTPInputContext)\n  const { char, hasFakeCaret, isActive } = inputOTPContext.slots[index]\n\n  return (\n    <div\n      ref={ref}\n      className={cn(\n        \"relative flex h-10 w-10 items-center justify-center border-y border-r border-input text-sm transition-all first:rounded-l-md first:border-l last:rounded-r-md\",\n        isActive && \"z-10 ring-2 ring-ring ring-offset-background\",\n        className\n      )}\n      {...props}\n    >\n      {char}\n      {hasFakeCaret && (\n        <div className=\"pointer-events-none absolute inset-0 flex items-center justify-center\">\n          <div className=\"h-4 w-px animate-caret-blink bg-foreground duration-1000\" />\n        </div>\n      )}\n    </div>\n  )\n})\nInputOTPSlot.displayName = \"InputOTPSlot\"\n\nconst InputOTPSeparator = React.forwardRef<\n  
React.ElementRef<\"div\">,\n  React.ComponentPropsWithoutRef<\"div\">\n>(({ ...props }, ref) => (\n  <div ref={ref} role=\"separator\" {...props}>\n    <Dot />\n  </div>\n))\nInputOTPSeparator.displayName = \"InputOTPSeparator\"\n\nexport { InputOTP, InputOTPGroup, InputOTPSlot, InputOTPSeparator }\n"
  },
  {
    "path": "frontend/app/src/components/ui/input.tsx",
    "content": "import * as React from \"react\"\n\nimport { cn } from \"@/lib/utils\"\n\nexport interface InputProps extends React.ComponentProps<\"input\"> {\n}\n\nconst Input = React.forwardRef<HTMLInputElement, InputProps>(\n  ({ className, type, ...props }, ref) => {\n    return (\n      <input\n        type={type}\n        className={cn(\n          \"flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-base ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium file:text-foreground placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm\",\n          className\n        )}\n        ref={ref}\n        {...props}\n      />\n    )\n  }\n)\nInput.displayName = \"Input\"\n\nexport { Input }\n"
  },
  {
    "path": "frontend/app/src/components/ui/label.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as LabelPrimitive from \"@radix-ui/react-label\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst labelVariants = cva(\n  \"text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70\"\n)\n\nconst Label = React.forwardRef<\n  React.ElementRef<typeof LabelPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof LabelPrimitive.Root> &\n    VariantProps<typeof labelVariants>\n>(({ className, ...props }, ref) => (\n  <LabelPrimitive.Root\n    ref={ref}\n    className={cn(labelVariants(), className)}\n    {...props}\n  />\n))\nLabel.displayName = LabelPrimitive.Root.displayName\n\nexport { Label }\n"
  },
  {
    "path": "frontend/app/src/components/ui/menubar.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as MenubarPrimitive from \"@radix-ui/react-menubar\"\nimport { Check, ChevronRight, Circle } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst MenubarMenu = MenubarPrimitive.Menu\n\nconst MenubarGroup = MenubarPrimitive.Group\n\nconst MenubarPortal = MenubarPrimitive.Portal\n\nconst MenubarSub = MenubarPrimitive.Sub\n\nconst MenubarRadioGroup = MenubarPrimitive.RadioGroup\n\nconst Menubar = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Root>\n>(({ className, ...props }, ref) => (\n  <MenubarPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"flex h-10 items-center space-x-1 rounded-md border bg-background p-1\",\n      className\n    )}\n    {...props}\n  />\n))\nMenubar.displayName = MenubarPrimitive.Root.displayName\n\nconst MenubarTrigger = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Trigger>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Trigger>\n>(({ className, ...props }, ref) => (\n  <MenubarPrimitive.Trigger\n    ref={ref}\n    className={cn(\n      \"flex cursor-default select-none items-center rounded-sm px-3 py-1.5 text-sm font-medium outline-none focus:bg-accent focus:text-accent-foreground data-[state=open]:bg-accent data-[state=open]:text-accent-foreground\",\n      className\n    )}\n    {...props}\n  />\n))\nMenubarTrigger.displayName = MenubarPrimitive.Trigger.displayName\n\nconst MenubarSubTrigger = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.SubTrigger>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.SubTrigger> & {\n    inset?: boolean\n  }\n>(({ className, inset, children, ...props }, ref) => (\n  <MenubarPrimitive.SubTrigger\n    ref={ref}\n    className={cn(\n      \"flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none focus:bg-accent focus:text-accent-foreground 
data-[state=open]:bg-accent data-[state=open]:text-accent-foreground\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  >\n    {children}\n    <ChevronRight className=\"ml-auto h-4 w-4\" />\n  </MenubarPrimitive.SubTrigger>\n))\nMenubarSubTrigger.displayName = MenubarPrimitive.SubTrigger.displayName\n\nconst MenubarSubContent = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.SubContent>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.SubContent>\n>(({ className, ...props }, ref) => (\n  <MenubarPrimitive.SubContent\n    ref={ref}\n    className={cn(\n      \"z-50 min-w-[8rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n      className\n    )}\n    {...props}\n  />\n))\nMenubarSubContent.displayName = MenubarPrimitive.SubContent.displayName\n\nconst MenubarContent = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Content>\n>(\n  (\n    { className, align = \"start\", alignOffset = -4, sideOffset = 8, ...props },\n    ref\n  ) => (\n    <MenubarPrimitive.Portal>\n      <MenubarPrimitive.Content\n        ref={ref}\n        align={align}\n        alignOffset={alignOffset}\n        sideOffset={sideOffset}\n        className={cn(\n          \"z-50 min-w-[12rem] overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-md data-[state=open]:animate-in data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 
data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n          className\n        )}\n        {...props}\n      />\n    </MenubarPrimitive.Portal>\n  )\n)\nMenubarContent.displayName = MenubarPrimitive.Content.displayName\n\nconst MenubarItem = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Item> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <MenubarPrimitive.Item\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nMenubarItem.displayName = MenubarPrimitive.Item.displayName\n\nconst MenubarCheckboxItem = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.CheckboxItem>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.CheckboxItem>\n>(({ className, children, checked, ...props }, ref) => (\n  <MenubarPrimitive.CheckboxItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    checked={checked}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <MenubarPrimitive.ItemIndicator>\n        <Check className=\"h-4 w-4\" />\n      </MenubarPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </MenubarPrimitive.CheckboxItem>\n))\nMenubarCheckboxItem.displayName = MenubarPrimitive.CheckboxItem.displayName\n\nconst MenubarRadioItem = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.RadioItem>,\n  React.ComponentPropsWithoutRef<typeof 
MenubarPrimitive.RadioItem>\n>(({ className, children, ...props }, ref) => (\n  <MenubarPrimitive.RadioItem\n    ref={ref}\n    className={cn(\n      \"relative flex cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <MenubarPrimitive.ItemIndicator>\n        <Circle className=\"h-2 w-2 fill-current\" />\n      </MenubarPrimitive.ItemIndicator>\n    </span>\n    {children}\n  </MenubarPrimitive.RadioItem>\n))\nMenubarRadioItem.displayName = MenubarPrimitive.RadioItem.displayName\n\nconst MenubarLabel = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Label>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Label> & {\n    inset?: boolean\n  }\n>(({ className, inset, ...props }, ref) => (\n  <MenubarPrimitive.Label\n    ref={ref}\n    className={cn(\n      \"px-2 py-1.5 text-sm font-semibold\",\n      inset && \"pl-8\",\n      className\n    )}\n    {...props}\n  />\n))\nMenubarLabel.displayName = MenubarPrimitive.Label.displayName\n\nconst MenubarSeparator = React.forwardRef<\n  React.ElementRef<typeof MenubarPrimitive.Separator>,\n  React.ComponentPropsWithoutRef<typeof MenubarPrimitive.Separator>\n>(({ className, ...props }, ref) => (\n  <MenubarPrimitive.Separator\n    ref={ref}\n    className={cn(\"-mx-1 my-1 h-px bg-muted\", className)}\n    {...props}\n  />\n))\nMenubarSeparator.displayName = MenubarPrimitive.Separator.displayName\n\nconst MenubarShortcut = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLSpanElement>) => {\n  return (\n    <span\n      className={cn(\n        \"ml-auto text-xs tracking-widest text-muted-foreground\",\n        className\n      )}\n      {...props}\n    />\n  )\n}\nMenubarShortcut.displayName = 
\"MenubarShortcut\"\n\nexport {\n  Menubar,\n  MenubarMenu,\n  MenubarTrigger,\n  MenubarContent,\n  MenubarItem,\n  MenubarSeparator,\n  MenubarLabel,\n  MenubarCheckboxItem,\n  MenubarRadioGroup,\n  MenubarRadioItem,\n  MenubarPortal,\n  MenubarSubContent,\n  MenubarSubTrigger,\n  MenubarGroup,\n  MenubarSub,\n  MenubarShortcut,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/navigation-menu.tsx",
    "content": "import * as React from \"react\"\nimport * as NavigationMenuPrimitive from \"@radix-ui/react-navigation-menu\"\nimport { cva } from \"class-variance-authority\"\nimport { ChevronDown } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst NavigationMenu = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.Root>\n>(({ className, children, ...props }, ref) => (\n  <NavigationMenuPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"relative z-10 flex max-w-max flex-1 items-center justify-center\",\n      className\n    )}\n    {...props}\n  >\n    {children}\n    <NavigationMenuViewport />\n  </NavigationMenuPrimitive.Root>\n))\nNavigationMenu.displayName = NavigationMenuPrimitive.Root.displayName\n\nconst NavigationMenuList = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.List>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.List>\n>(({ className, ...props }, ref) => (\n  <NavigationMenuPrimitive.List\n    ref={ref}\n    className={cn(\n      \"group flex flex-1 list-none items-center justify-center space-x-1\",\n      className\n    )}\n    {...props}\n  />\n))\nNavigationMenuList.displayName = NavigationMenuPrimitive.List.displayName\n\nconst NavigationMenuItem = NavigationMenuPrimitive.Item\n\nconst navigationMenuTriggerStyle = cva(\n  \"group inline-flex h-10 w-max items-center justify-center rounded-md bg-background px-4 py-2 text-sm font-medium transition-colors hover:bg-accent hover:text-accent-foreground focus:bg-accent focus:text-accent-foreground focus:outline-none disabled:pointer-events-none disabled:opacity-50 data-[active]:bg-accent/50 data-[state=open]:bg-accent/50\"\n)\n\nconst NavigationMenuTrigger = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.Trigger>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.Trigger>\n>(({ className, children, ...props 
}, ref) => (\n  <NavigationMenuPrimitive.Trigger\n    ref={ref}\n    className={cn(navigationMenuTriggerStyle(), \"group\", className)}\n    {...props}\n  >\n    {children}{\" \"}\n    <ChevronDown\n      className=\"relative top-[1px] ml-1 h-3 w-3 transition duration-200 group-data-[state=open]:rotate-180\"\n      aria-hidden=\"true\"\n    />\n  </NavigationMenuPrimitive.Trigger>\n))\nNavigationMenuTrigger.displayName = NavigationMenuPrimitive.Trigger.displayName\n\nconst NavigationMenuContent = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.Content>\n>(({ className, ...props }, ref) => (\n  <NavigationMenuPrimitive.Content\n    ref={ref}\n    className={cn(\n      \"left-0 top-0 w-full data-[motion^=from-]:animate-in data-[motion^=to-]:animate-out data-[motion^=from-]:fade-in data-[motion^=to-]:fade-out data-[motion=from-end]:slide-in-from-right-52 data-[motion=from-start]:slide-in-from-left-52 data-[motion=to-end]:slide-out-to-right-52 data-[motion=to-start]:slide-out-to-left-52 md:absolute md:w-auto \",\n      className\n    )}\n    {...props}\n  />\n))\nNavigationMenuContent.displayName = NavigationMenuPrimitive.Content.displayName\n\nconst NavigationMenuLink = NavigationMenuPrimitive.Link\n\nconst NavigationMenuViewport = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.Viewport>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.Viewport>\n>(({ className, ...props }, ref) => (\n  <div className={cn(\"absolute left-0 top-full flex justify-center\")}>\n    <NavigationMenuPrimitive.Viewport\n      className={cn(\n        \"origin-top-center relative mt-1.5 h-[var(--radix-navigation-menu-viewport-height)] w-full overflow-hidden rounded-md border bg-popover text-popover-foreground shadow-lg data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-90 
md:w-[var(--radix-navigation-menu-viewport-width)]\",\n        className\n      )}\n      ref={ref}\n      {...props}\n    />\n  </div>\n))\nNavigationMenuViewport.displayName =\n  NavigationMenuPrimitive.Viewport.displayName\n\nconst NavigationMenuIndicator = React.forwardRef<\n  React.ElementRef<typeof NavigationMenuPrimitive.Indicator>,\n  React.ComponentPropsWithoutRef<typeof NavigationMenuPrimitive.Indicator>\n>(({ className, ...props }, ref) => (\n  <NavigationMenuPrimitive.Indicator\n    ref={ref}\n    className={cn(\n      \"top-full z-[1] flex h-1.5 items-end justify-center overflow-hidden data-[state=visible]:animate-in data-[state=hidden]:animate-out data-[state=hidden]:fade-out data-[state=visible]:fade-in\",\n      className\n    )}\n    {...props}\n  >\n    <div className=\"relative top-[60%] h-2 w-2 rotate-45 rounded-tl-sm bg-border shadow-md\" />\n  </NavigationMenuPrimitive.Indicator>\n))\nNavigationMenuIndicator.displayName =\n  NavigationMenuPrimitive.Indicator.displayName\n\nexport {\n  navigationMenuTriggerStyle,\n  NavigationMenu,\n  NavigationMenuList,\n  NavigationMenuItem,\n  NavigationMenuContent,\n  NavigationMenuTrigger,\n  NavigationMenuLink,\n  NavigationMenuIndicator,\n  NavigationMenuViewport,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/pagination.tsx",
    "content": "import * as React from \"react\"\nimport { ChevronLeft, ChevronRight, MoreHorizontal } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\nimport { ButtonProps, buttonVariants } from \"@/components/ui/button\"\n\nconst Pagination = ({ className, ...props }: React.ComponentProps<\"nav\">) => (\n  <nav\n    role=\"navigation\"\n    aria-label=\"pagination\"\n    className={cn(\"mx-auto flex w-full justify-center\", className)}\n    {...props}\n  />\n)\nPagination.displayName = \"Pagination\"\n\nconst PaginationContent = React.forwardRef<\n  HTMLUListElement,\n  React.ComponentProps<\"ul\">\n>(({ className, ...props }, ref) => (\n  <ul\n    ref={ref}\n    className={cn(\"flex flex-row items-center gap-1\", className)}\n    {...props}\n  />\n))\nPaginationContent.displayName = \"PaginationContent\"\n\nconst PaginationItem = React.forwardRef<\n  HTMLLIElement,\n  React.ComponentProps<\"li\">\n>(({ className, ...props }, ref) => (\n  <li ref={ref} className={cn(\"\", className)} {...props} />\n))\nPaginationItem.displayName = \"PaginationItem\"\n\ntype PaginationLinkProps = {\n  isActive?: boolean\n} & Pick<ButtonProps, \"size\"> &\n  React.ComponentProps<\"a\">\n\nconst PaginationLink = ({\n  className,\n  isActive,\n  size = \"icon\",\n  ...props\n}: PaginationLinkProps) => (\n  <a\n    aria-current={isActive ? \"page\" : undefined}\n    className={cn(\n      buttonVariants({\n        variant: isActive ? 
\"outline\" : \"ghost\",\n        size,\n      }),\n      className\n    )}\n    {...props}\n  />\n)\nPaginationLink.displayName = \"PaginationLink\"\n\nconst PaginationPrevious = ({\n  className,\n  ...props\n}: React.ComponentProps<typeof PaginationLink>) => (\n  <PaginationLink\n    aria-label=\"Go to previous page\"\n    size=\"default\"\n    className={cn(\"gap-1 pl-2.5\", className)}\n    {...props}\n  >\n    <ChevronLeft className=\"h-4 w-4\" />\n    <span>Previous</span>\n  </PaginationLink>\n)\nPaginationPrevious.displayName = \"PaginationPrevious\"\n\nconst PaginationNext = ({\n  className,\n  ...props\n}: React.ComponentProps<typeof PaginationLink>) => (\n  <PaginationLink\n    aria-label=\"Go to next page\"\n    size=\"default\"\n    className={cn(\"gap-1 pr-2.5\", className)}\n    {...props}\n  >\n    <span>Next</span>\n    <ChevronRight className=\"h-4 w-4\" />\n  </PaginationLink>\n)\nPaginationNext.displayName = \"PaginationNext\"\n\nconst PaginationEllipsis = ({\n  className,\n  ...props\n}: React.ComponentProps<\"span\">) => (\n  <span\n    aria-hidden\n    className={cn(\"flex h-9 w-9 items-center justify-center\", className)}\n    {...props}\n  >\n    <MoreHorizontal className=\"h-4 w-4\" />\n    <span className=\"sr-only\">More pages</span>\n  </span>\n)\nPaginationEllipsis.displayName = \"PaginationEllipsis\"\n\nexport {\n  Pagination,\n  PaginationContent,\n  PaginationEllipsis,\n  PaginationItem,\n  PaginationLink,\n  PaginationNext,\n  PaginationPrevious,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/popover.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as PopoverPrimitive from \"@radix-ui/react-popover\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Popover = PopoverPrimitive.Root\n\nconst PopoverTrigger = PopoverPrimitive.Trigger\n\nconst PopoverContent = React.forwardRef<\n  React.ElementRef<typeof PopoverPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof PopoverPrimitive.Content>\n>(({ className, align = \"center\", sideOffset = 4, ...props }, ref) => (\n  <PopoverPrimitive.Portal>\n    <PopoverPrimitive.Content\n      ref={ref}\n      align={align}\n      sideOffset={sideOffset}\n      className={cn(\n        \"z-50 w-72 rounded-md border bg-popover p-4 text-popover-foreground shadow-md outline-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 origin-[--radix-popover-content-transform-origin]\",\n        className\n      )}\n      {...props}\n    />\n  </PopoverPrimitive.Portal>\n))\nPopoverContent.displayName = PopoverPrimitive.Content.displayName\n\nexport { Popover, PopoverTrigger, PopoverContent }\n"
  },
  {
    "path": "frontend/app/src/components/ui/progress.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as ProgressPrimitive from \"@radix-ui/react-progress\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Progress = React.forwardRef<\n  React.ElementRef<typeof ProgressPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof ProgressPrimitive.Root>\n>(({ className, value, ...props }, ref) => (\n  <ProgressPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"relative h-4 w-full overflow-hidden rounded-full bg-secondary\",\n      className\n    )}\n    {...props}\n  >\n    <ProgressPrimitive.Indicator\n      className=\"h-full w-full flex-1 bg-primary transition-all\"\n      style={{ transform: `translateX(-${100 - (value || 0)}%)` }}\n    />\n  </ProgressPrimitive.Root>\n))\nProgress.displayName = ProgressPrimitive.Root.displayName\n\nexport { Progress }\n"
  },
  {
    "path": "frontend/app/src/components/ui/radio-group.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as RadioGroupPrimitive from \"@radix-ui/react-radio-group\"\nimport { Circle } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst RadioGroup = React.forwardRef<\n  React.ElementRef<typeof RadioGroupPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof RadioGroupPrimitive.Root>\n>(({ className, ...props }, ref) => {\n  return (\n    <RadioGroupPrimitive.Root\n      className={cn(\"grid gap-2\", className)}\n      {...props}\n      ref={ref}\n    />\n  )\n})\nRadioGroup.displayName = RadioGroupPrimitive.Root.displayName\n\nconst RadioGroupItem = React.forwardRef<\n  React.ElementRef<typeof RadioGroupPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof RadioGroupPrimitive.Item>\n>(({ className, ...props }, ref) => {\n  return (\n    <RadioGroupPrimitive.Item\n      ref={ref}\n      className={cn(\n        \"aspect-square h-4 w-4 rounded-full border border-primary text-primary ring-offset-background focus:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50\",\n        className\n      )}\n      {...props}\n    >\n      <RadioGroupPrimitive.Indicator className=\"flex items-center justify-center\">\n        <Circle className=\"h-2.5 w-2.5 fill-current text-current\" />\n      </RadioGroupPrimitive.Indicator>\n    </RadioGroupPrimitive.Item>\n  )\n})\nRadioGroupItem.displayName = RadioGroupPrimitive.Item.displayName\n\nexport { RadioGroup, RadioGroupItem }\n"
  },
  {
    "path": "frontend/app/src/components/ui/resizable.tsx",
    "content": "\"use client\"\n\nimport { GripVertical } from \"lucide-react\"\nimport * as ResizablePrimitive from \"react-resizable-panels\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst ResizablePanelGroup = ({\n  className,\n  ...props\n}: React.ComponentProps<typeof ResizablePrimitive.PanelGroup>) => (\n  <ResizablePrimitive.PanelGroup\n    className={cn(\n      \"flex h-full w-full data-[panel-group-direction=vertical]:flex-col\",\n      className\n    )}\n    {...props}\n  />\n)\n\nconst ResizablePanel = ResizablePrimitive.Panel\n\nconst ResizableHandle = ({\n  withHandle,\n  className,\n  ...props\n}: React.ComponentProps<typeof ResizablePrimitive.PanelResizeHandle> & {\n  withHandle?: boolean\n}) => (\n  <ResizablePrimitive.PanelResizeHandle\n    className={cn(\n      \"relative flex w-px items-center justify-center bg-border after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring focus-visible:ring-offset-1 data-[panel-group-direction=vertical]:h-px data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:-translate-y-1/2 data-[panel-group-direction=vertical]:after:translate-x-0 [&[data-panel-group-direction=vertical]>div]:rotate-90\",\n      className\n    )}\n    {...props}\n  >\n    {withHandle && (\n      <div className=\"z-10 flex h-4 w-3 items-center justify-center rounded-sm border bg-border\">\n        <GripVertical className=\"h-2.5 w-2.5\" />\n      </div>\n    )}\n  </ResizablePrimitive.PanelResizeHandle>\n)\n\nexport { ResizablePanelGroup, ResizablePanel, ResizableHandle }\n"
  },
  {
    "path": "frontend/app/src/components/ui/scroll-area.tsx",
    "content": "\"use client\"\n\nimport type { Ref } from 'react';\nimport * as React from \"react\"\nimport * as ScrollAreaPrimitive from \"@radix-ui/react-scroll-area\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst ScrollArea = React.forwardRef<\n  React.ElementRef<typeof ScrollAreaPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof ScrollAreaPrimitive.Root> & { viewportRef?: Ref<HTMLDivElement> }\n>(({ className, children, viewportRef, ...props }, ref) => (\n  <ScrollAreaPrimitive.Root\n    ref={ref}\n    className={cn(\"relative overflow-hidden\", className)}\n    {...props}\n  >\n    <ScrollAreaPrimitive.Viewport ref={viewportRef} className=\"h-full w-full rounded-[inherit]\">\n      {children}\n    </ScrollAreaPrimitive.Viewport>\n    <ScrollBar />\n    <ScrollAreaPrimitive.Corner />\n  </ScrollAreaPrimitive.Root>\n))\nScrollArea.displayName = ScrollAreaPrimitive.Root.displayName\n\nconst ScrollBar = React.forwardRef<\n  React.ElementRef<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>,\n  React.ComponentPropsWithoutRef<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>\n>(({ className, orientation = \"vertical\", ...props }, ref) => (\n  <ScrollAreaPrimitive.ScrollAreaScrollbar\n    ref={ref}\n    orientation={orientation}\n    className={cn(\n      \"flex touch-none select-none transition-colors\",\n      orientation === \"vertical\" &&\n        \"h-full w-2.5 border-l border-l-transparent p-[1px]\",\n      orientation === \"horizontal\" &&\n        \"h-2.5 flex-col border-t border-t-transparent p-[1px]\",\n      className\n    )}\n    {...props}\n  >\n    <ScrollAreaPrimitive.ScrollAreaThumb className=\"relative flex-1 rounded-full bg-border\" />\n  </ScrollAreaPrimitive.ScrollAreaScrollbar>\n))\nScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName\n\nexport { ScrollArea, ScrollBar }\n"
  },
  {
    "path": "frontend/app/src/components/ui/select.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as SelectPrimitive from \"@radix-ui/react-select\"\nimport { Check, ChevronDown, ChevronUp } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Select = SelectPrimitive.Root\n\nconst SelectGroup = SelectPrimitive.Group\n\nconst SelectValue = SelectPrimitive.Value\n\nconst SelectTrigger = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.Trigger>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Trigger>\n>(({ className, children, ...props }, ref) => (\n  <SelectPrimitive.Trigger\n    ref={ref}\n    className={cn(\n      \"flex h-10 w-full items-center justify-between rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 [&>span]:line-clamp-1\",\n      className\n    )}\n    {...props}\n  >\n    {children}\n    <SelectPrimitive.Icon asChild>\n      <ChevronDown className=\"h-4 w-4 opacity-50\" />\n    </SelectPrimitive.Icon>\n  </SelectPrimitive.Trigger>\n))\nSelectTrigger.displayName = SelectPrimitive.Trigger.displayName\n\nconst SelectScrollUpButton = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.ScrollUpButton>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.ScrollUpButton>\n>(({ className, ...props }, ref) => (\n  <SelectPrimitive.ScrollUpButton\n    ref={ref}\n    className={cn(\n      \"flex cursor-default items-center justify-center py-1\",\n      className\n    )}\n    {...props}\n  >\n    <ChevronUp className=\"h-4 w-4\" />\n  </SelectPrimitive.ScrollUpButton>\n))\nSelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName\n\nconst SelectScrollDownButton = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.ScrollDownButton>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.ScrollDownButton>\n>(({ 
className, ...props }, ref) => (\n  <SelectPrimitive.ScrollDownButton\n    ref={ref}\n    className={cn(\n      \"flex cursor-default items-center justify-center py-1\",\n      className\n    )}\n    {...props}\n  >\n    <ChevronDown className=\"h-4 w-4\" />\n  </SelectPrimitive.ScrollDownButton>\n))\nSelectScrollDownButton.displayName =\n  SelectPrimitive.ScrollDownButton.displayName\n\nconst SelectContent = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Content>\n>(({ className, children, position = \"popper\", ...props }, ref) => (\n  <SelectPrimitive.Portal>\n    <SelectPrimitive.Content\n      ref={ref}\n      className={cn(\n        \"relative z-50 max-h-96 min-w-[8rem] overflow-hidden rounded-md border bg-popover text-popover-foreground shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n        position === \"popper\" &&\n          \"data-[side=bottom]:translate-y-1 data-[side=left]:-translate-x-1 data-[side=right]:translate-x-1 data-[side=top]:-translate-y-1\",\n        className\n      )}\n      position={position}\n      {...props}\n    >\n      <SelectScrollUpButton />\n      <SelectPrimitive.Viewport\n        className={cn(\n          \"p-1\",\n          position === \"popper\" &&\n            \"h-[var(--radix-select-trigger-height)] w-full min-w-[var(--radix-select-trigger-width)]\"\n        )}\n      >\n        {children}\n      </SelectPrimitive.Viewport>\n      <SelectScrollDownButton />\n    </SelectPrimitive.Content>\n  </SelectPrimitive.Portal>\n))\nSelectContent.displayName = SelectPrimitive.Content.displayName\n\nconst SelectLabel = React.forwardRef<\n  
React.ElementRef<typeof SelectPrimitive.Label>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Label>\n>(({ className, ...props }, ref) => (\n  <SelectPrimitive.Label\n    ref={ref}\n    className={cn(\"py-1.5 pl-8 pr-2 text-sm font-semibold\", className)}\n    {...props}\n  />\n))\nSelectLabel.displayName = SelectPrimitive.Label.displayName\n\nconst SelectItem = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Item>\n>(({ className, children, ...props }, ref) => (\n  <SelectPrimitive.Item\n    ref={ref}\n    className={cn(\n      \"relative flex w-full cursor-default select-none items-center rounded-sm py-1.5 pl-8 pr-2 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50\",\n      className\n    )}\n    {...props}\n  >\n    <span className=\"absolute left-2 flex h-3.5 w-3.5 items-center justify-center\">\n      <SelectPrimitive.ItemIndicator>\n        <Check className=\"h-4 w-4\" />\n      </SelectPrimitive.ItemIndicator>\n    </span>\n\n    <SelectPrimitive.ItemText>{children}</SelectPrimitive.ItemText>\n  </SelectPrimitive.Item>\n))\nSelectItem.displayName = SelectPrimitive.Item.displayName\n\nconst SelectSeparator = React.forwardRef<\n  React.ElementRef<typeof SelectPrimitive.Separator>,\n  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Separator>\n>(({ className, ...props }, ref) => (\n  <SelectPrimitive.Separator\n    ref={ref}\n    className={cn(\"-mx-1 my-1 h-px bg-muted\", className)}\n    {...props}\n  />\n))\nSelectSeparator.displayName = SelectPrimitive.Separator.displayName\n\nexport {\n  Select,\n  SelectGroup,\n  SelectValue,\n  SelectTrigger,\n  SelectContent,\n  SelectLabel,\n  SelectItem,\n  SelectSeparator,\n  SelectScrollUpButton,\n  SelectScrollDownButton,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/separator.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as SeparatorPrimitive from \"@radix-ui/react-separator\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Separator = React.forwardRef<\n  React.ElementRef<typeof SeparatorPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof SeparatorPrimitive.Root>\n>(\n  (\n    { className, orientation = \"horizontal\", decorative = true, ...props },\n    ref\n  ) => (\n    <SeparatorPrimitive.Root\n      ref={ref}\n      decorative={decorative}\n      orientation={orientation}\n      className={cn(\n        \"shrink-0 bg-border\",\n        orientation === \"horizontal\" ? \"h-[1px] w-full\" : \"h-full w-[1px]\",\n        className\n      )}\n      {...props}\n    />\n  )\n)\nSeparator.displayName = SeparatorPrimitive.Root.displayName\n\nexport { Separator }\n"
  },
  {
    "path": "frontend/app/src/components/ui/sheet.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as SheetPrimitive from \"@radix-ui/react-dialog\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\nimport { X } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Sheet = SheetPrimitive.Root\n\nconst SheetTrigger = SheetPrimitive.Trigger\n\nconst SheetClose = SheetPrimitive.Close\n\nconst SheetPortal = SheetPrimitive.Portal\n\nconst SheetOverlay = React.forwardRef<\n  React.ElementRef<typeof SheetPrimitive.Overlay>,\n  React.ComponentPropsWithoutRef<typeof SheetPrimitive.Overlay>\n>(({ className, ...props }, ref) => (\n  <SheetPrimitive.Overlay\n    className={cn(\n      \"fixed inset-0 z-50 bg-black/80  data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0\",\n      className\n    )}\n    {...props}\n    ref={ref}\n  />\n))\nSheetOverlay.displayName = SheetPrimitive.Overlay.displayName\n\nconst sheetVariants = cva(\n  \"fixed z-50 gap-4 bg-background p-6 shadow-lg transition ease-in-out data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:duration-300 data-[state=open]:duration-500\",\n  {\n    variants: {\n      side: {\n        top: \"inset-x-0 top-0 border-b data-[state=closed]:slide-out-to-top data-[state=open]:slide-in-from-top\",\n        bottom:\n          \"inset-x-0 bottom-0 border-t data-[state=closed]:slide-out-to-bottom data-[state=open]:slide-in-from-bottom\",\n        left: \"inset-y-0 left-0 h-full w-3/4 border-r data-[state=closed]:slide-out-to-left data-[state=open]:slide-in-from-left sm:max-w-sm\",\n        right:\n          \"inset-y-0 right-0 h-full w-3/4  border-l data-[state=closed]:slide-out-to-right data-[state=open]:slide-in-from-right sm:max-w-sm\",\n      },\n    },\n    defaultVariants: {\n      side: \"right\",\n    },\n  }\n)\n\ninterface SheetContentProps\n  extends React.ComponentPropsWithoutRef<typeof 
SheetPrimitive.Content>,\n    VariantProps<typeof sheetVariants> {}\n\nconst SheetContent = React.forwardRef<\n  React.ElementRef<typeof SheetPrimitive.Content>,\n  SheetContentProps\n>(({ side = \"right\", className, children, ...props }, ref) => (\n  <SheetPortal>\n    <SheetOverlay />\n    <SheetPrimitive.Content\n      ref={ref}\n      className={cn(sheetVariants({ side }), className)}\n      {...props}\n    >\n      {children}\n      <SheetPrimitive.Close className=\"absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-secondary\">\n        <X className=\"h-4 w-4\" />\n        <span className=\"sr-only\">Close</span>\n      </SheetPrimitive.Close>\n    </SheetPrimitive.Content>\n  </SheetPortal>\n))\nSheetContent.displayName = SheetPrimitive.Content.displayName\n\nconst SheetHeader = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col space-y-2 text-center sm:text-left\",\n      className\n    )}\n    {...props}\n  />\n)\nSheetHeader.displayName = \"SheetHeader\"\n\nconst SheetFooter = ({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) => (\n  <div\n    className={cn(\n      \"flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2\",\n      className\n    )}\n    {...props}\n  />\n)\nSheetFooter.displayName = \"SheetFooter\"\n\nconst SheetTitle = React.forwardRef<\n  React.ElementRef<typeof SheetPrimitive.Title>,\n  React.ComponentPropsWithoutRef<typeof SheetPrimitive.Title>\n>(({ className, ...props }, ref) => (\n  <SheetPrimitive.Title\n    ref={ref}\n    className={cn(\"text-lg font-semibold text-foreground\", className)}\n    {...props}\n  />\n))\nSheetTitle.displayName = SheetPrimitive.Title.displayName\n\nconst SheetDescription = React.forwardRef<\n  React.ElementRef<typeof 
SheetPrimitive.Description>,\n  React.ComponentPropsWithoutRef<typeof SheetPrimitive.Description>\n>(({ className, ...props }, ref) => (\n  <SheetPrimitive.Description\n    ref={ref}\n    className={cn(\"text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nSheetDescription.displayName = SheetPrimitive.Description.displayName\n\nexport {\n  Sheet,\n  SheetPortal,\n  SheetOverlay,\n  SheetTrigger,\n  SheetClose,\n  SheetContent,\n  SheetHeader,\n  SheetFooter,\n  SheetTitle,\n  SheetDescription,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/sidebar.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport { Slot } from \"@radix-ui/react-slot\"\nimport { VariantProps, cva } from \"class-variance-authority\"\nimport { PanelLeft } from \"lucide-react\"\n\nimport { useIsMobile } from \"@/hooks/use-mobile\"\nimport { cn } from \"@/lib/utils\"\nimport { Button } from \"@/components/ui/button\"\nimport { Input } from \"@/components/ui/input\"\nimport { Separator } from \"@/components/ui/separator\"\nimport { Sheet, SheetContent } from \"@/components/ui/sheet\"\nimport { Skeleton } from \"@/components/ui/skeleton\"\nimport {\n  Tooltip,\n  TooltipContent,\n  TooltipProvider,\n  TooltipTrigger,\n} from \"@/components/ui/tooltip\"\n\nconst SIDEBAR_COOKIE_NAME = \"sidebar:state\"\nconst SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 7\nconst SIDEBAR_WIDTH = \"16rem\"\nconst SIDEBAR_WIDTH_MOBILE = \"18rem\"\nconst SIDEBAR_WIDTH_ICON = \"3rem\"\nconst SIDEBAR_KEYBOARD_SHORTCUT = \"b\"\n\ntype SidebarContext = {\n  state: \"expanded\" | \"collapsed\"\n  open: boolean\n  setOpen: (open: boolean) => void\n  openMobile: boolean\n  setOpenMobile: (open: boolean) => void\n  isMobile: boolean\n  toggleSidebar: () => void\n}\n\nconst SidebarContext = React.createContext<SidebarContext | null>(null)\n\nfunction useSidebar() {\n  const context = React.useContext(SidebarContext)\n  if (!context) {\n    throw new Error(\"useSidebar must be used within a SidebarProvider.\")\n  }\n\n  return context\n}\n\nconst SidebarProvider = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> & {\n    defaultOpen?: boolean\n    open?: boolean\n    onOpenChange?: (open: boolean) => void\n  }\n>(\n  (\n    {\n      defaultOpen = true,\n      open: openProp,\n      onOpenChange: setOpenProp,\n      className,\n      style,\n      children,\n      ...props\n    },\n    ref\n  ) => {\n    const isMobile = useIsMobile()\n    const [openMobile, setOpenMobile] = React.useState(false)\n\n    // This is the internal state of the 
sidebar.\n    // We use openProp and setOpenProp for control from outside the component.\n    const [_open, _setOpen] = React.useState(defaultOpen)\n    const open = openProp ?? _open\n    const setOpen = React.useCallback(\n      (value: boolean | ((value: boolean) => boolean)) => {\n        const openState = typeof value === \"function\" ? value(open) : value\n        if (setOpenProp) {\n          setOpenProp(openState)\n        } else {\n          _setOpen(openState)\n        }\n\n        // This sets the cookie to keep the sidebar state.\n        document.cookie = `${SIDEBAR_COOKIE_NAME}=${openState}; path=/; max-age=${SIDEBAR_COOKIE_MAX_AGE}`\n      },\n      [setOpenProp, open]\n    )\n\n    // Helper to toggle the sidebar.\n    const toggleSidebar = React.useCallback(() => {\n      return isMobile\n        ? setOpenMobile((open) => !open)\n        : setOpen((open) => !open)\n    }, [isMobile, setOpen, setOpenMobile])\n\n    // Adds a keyboard shortcut to toggle the sidebar.\n    React.useEffect(() => {\n      const handleKeyDown = (event: KeyboardEvent) => {\n        if (\n          event.key === SIDEBAR_KEYBOARD_SHORTCUT &&\n          (event.metaKey || event.ctrlKey)\n        ) {\n          event.preventDefault()\n          toggleSidebar()\n        }\n      }\n\n      window.addEventListener(\"keydown\", handleKeyDown)\n      return () => window.removeEventListener(\"keydown\", handleKeyDown)\n    }, [toggleSidebar])\n\n    // We add a state so that we can do data-state=\"expanded\" or \"collapsed\".\n    // This makes it easier to style the sidebar with Tailwind classes.\n    const state = open ? 
\"expanded\" : \"collapsed\"\n\n    const contextValue = React.useMemo<SidebarContext>(\n      () => ({\n        state,\n        open,\n        setOpen,\n        isMobile,\n        openMobile,\n        setOpenMobile,\n        toggleSidebar,\n      }),\n      [state, open, setOpen, isMobile, openMobile, setOpenMobile, toggleSidebar]\n    )\n\n    return (\n      <SidebarContext.Provider value={contextValue}>\n        <TooltipProvider delayDuration={0}>\n          <div\n            style={\n              {\n                \"--sidebar-width\": SIDEBAR_WIDTH,\n                \"--sidebar-width-icon\": SIDEBAR_WIDTH_ICON,\n                ...style,\n              } as React.CSSProperties\n            }\n            className={cn(\n              \"group/sidebar-wrapper flex min-h-svh w-full has-[[data-variant=inset]]:bg-sidebar\",\n              className\n            )}\n            ref={ref}\n            {...props}\n          >\n            {children}\n          </div>\n        </TooltipProvider>\n      </SidebarContext.Provider>\n    )\n  }\n)\nSidebarProvider.displayName = \"SidebarProvider\"\n\nconst Sidebar = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> & {\n    side?: \"left\" | \"right\"\n    variant?: \"sidebar\" | \"floating\" | \"inset\"\n    collapsible?: \"offcanvas\" | \"icon\" | \"none\"\n  }\n>(\n  (\n    {\n      side = \"left\",\n      variant = \"sidebar\",\n      collapsible = \"offcanvas\",\n      className,\n      children,\n      ...props\n    },\n    ref\n  ) => {\n    const { isMobile, state, openMobile, setOpenMobile } = useSidebar()\n\n    if (collapsible === \"none\") {\n      return (\n        <div\n          className={cn(\n            \"flex h-full w-[--sidebar-width] flex-col bg-sidebar text-sidebar-foreground\",\n            className\n          )}\n          ref={ref}\n          {...props}\n        >\n          {children}\n        </div>\n      )\n    }\n\n    if (isMobile) {\n      return (\n        <Sheet 
open={openMobile} onOpenChange={setOpenMobile} {...props}>\n          <SheetContent\n            data-sidebar=\"sidebar\"\n            data-mobile=\"true\"\n            className=\"w-[--sidebar-width] bg-sidebar p-0 text-sidebar-foreground [&>button]:hidden\"\n            style={\n              {\n                \"--sidebar-width\": SIDEBAR_WIDTH_MOBILE,\n              } as React.CSSProperties\n            }\n            side={side}\n          >\n            <div className=\"flex h-full w-full flex-col\">{children}</div>\n          </SheetContent>\n        </Sheet>\n      )\n    }\n\n    return (\n      <div\n        ref={ref}\n        className=\"group peer hidden md:block text-sidebar-foreground\"\n        data-state={state}\n        data-collapsible={state === \"collapsed\" ? collapsible : \"\"}\n        data-variant={variant}\n        data-side={side}\n      >\n        {/* This is what handles the sidebar gap on desktop */}\n        <div\n          className={cn(\n            \"duration-200 relative h-svh w-[--sidebar-width] bg-transparent transition-[width] ease-linear\",\n            \"group-data-[collapsible=offcanvas]:w-0\",\n            \"group-data-[side=right]:rotate-180\",\n            variant === \"floating\" || variant === \"inset\"\n              ? \"group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4))]\"\n              : \"group-data-[collapsible=icon]:w-[--sidebar-width-icon]\"\n          )}\n        />\n        <div\n          className={cn(\n            \"duration-200 fixed inset-y-0 z-10 hidden h-svh w-[--sidebar-width] transition-[left,right,width] ease-linear md:flex\",\n            side === \"left\"\n              ? 
\"left-0 group-data-[collapsible=offcanvas]:left-[calc(var(--sidebar-width)*-1)]\"\n              : \"right-0 group-data-[collapsible=offcanvas]:right-[calc(var(--sidebar-width)*-1)]\",\n            // Adjust the padding for floating and inset variants.\n            variant === \"floating\" || variant === \"inset\"\n              ? \"p-2 group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4)_+2px)]\"\n              : \"group-data-[collapsible=icon]:w-[--sidebar-width-icon] group-data-[side=left]:border-r group-data-[side=right]:border-l\",\n            className\n          )}\n          {...props}\n        >\n          <div\n            data-sidebar=\"sidebar\"\n            className=\"flex h-full w-full flex-col bg-sidebar group-data-[variant=floating]:rounded-lg group-data-[variant=floating]:border group-data-[variant=floating]:border-sidebar-border group-data-[variant=floating]:shadow\"\n          >\n            {children}\n          </div>\n        </div>\n      </div>\n    )\n  }\n)\nSidebar.displayName = \"Sidebar\"\n\nconst SidebarTrigger = React.forwardRef<\n  React.ElementRef<typeof Button>,\n  React.ComponentProps<typeof Button>\n>(({ className, onClick, ...props }, ref) => {\n  const { toggleSidebar } = useSidebar()\n\n  return (\n    <Button\n      ref={ref}\n      data-sidebar=\"trigger\"\n      variant=\"ghost\"\n      size=\"icon\"\n      className={cn(\"h-7 w-7\", className)}\n      onClick={(event) => {\n        onClick?.(event)\n        toggleSidebar()\n      }}\n      {...props}\n    >\n      <PanelLeft />\n      <span className=\"sr-only\">Toggle Sidebar</span>\n    </Button>\n  )\n})\nSidebarTrigger.displayName = \"SidebarTrigger\"\n\nconst SidebarRail = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<\"button\">\n>(({ className, ...props }, ref) => {\n  const { toggleSidebar } = useSidebar()\n\n  return (\n    <button\n      ref={ref}\n      data-sidebar=\"rail\"\n      aria-label=\"Toggle 
Sidebar\"\n      tabIndex={-1}\n      onClick={toggleSidebar}\n      title=\"Toggle Sidebar\"\n      className={cn(\n        \"absolute inset-y-0 z-20 hidden w-4 -translate-x-1/2 transition-all ease-linear after:absolute after:inset-y-0 after:left-1/2 after:w-[2px] hover:after:bg-sidebar-border group-data-[side=left]:-right-4 group-data-[side=right]:left-0 sm:flex\",\n        \"[[data-side=left]_&]:cursor-w-resize [[data-side=right]_&]:cursor-e-resize\",\n        \"[[data-side=left][data-state=collapsed]_&]:cursor-e-resize [[data-side=right][data-state=collapsed]_&]:cursor-w-resize\",\n        \"group-data-[collapsible=offcanvas]:translate-x-0 group-data-[collapsible=offcanvas]:after:left-full group-data-[collapsible=offcanvas]:hover:bg-sidebar\",\n        \"[[data-side=left][data-collapsible=offcanvas]_&]:-right-2\",\n        \"[[data-side=right][data-collapsible=offcanvas]_&]:-left-2\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarRail.displayName = \"SidebarRail\"\n\nconst SidebarInset = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"main\">\n>(({ className, ...props }, ref) => {\n  return (\n    <main\n      ref={ref}\n      className={cn(\n        \"relative flex min-h-svh flex-1 flex-col bg-background\",\n        \"peer-data-[variant=inset]:min-h-[calc(100svh-theme(spacing.4))] md:peer-data-[variant=inset]:m-2 md:peer-data-[state=collapsed]:peer-data-[variant=inset]:ml-2 md:peer-data-[variant=inset]:ml-0 md:peer-data-[variant=inset]:rounded-xl md:peer-data-[variant=inset]:shadow\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarInset.displayName = \"SidebarInset\"\n\nconst SidebarInput = React.forwardRef<\n  React.ElementRef<typeof Input>,\n  React.ComponentProps<typeof Input>\n>(({ className, ...props }, ref) => {\n  return (\n    <Input\n      ref={ref}\n      data-sidebar=\"input\"\n      className={cn(\n        \"h-8 w-full bg-background shadow-none focus-visible:ring-2 
focus-visible:ring-sidebar-ring\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarInput.displayName = \"SidebarInput\"\n\nconst SidebarHeader = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => {\n  return (\n    <div\n      ref={ref}\n      data-sidebar=\"header\"\n      className={cn(\"flex flex-col gap-2 p-2\", className)}\n      {...props}\n    />\n  )\n})\nSidebarHeader.displayName = \"SidebarHeader\"\n\nconst SidebarFooter = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => {\n  return (\n    <div\n      ref={ref}\n      data-sidebar=\"footer\"\n      className={cn(\"flex flex-col gap-2 p-2\", className)}\n      {...props}\n    />\n  )\n})\nSidebarFooter.displayName = \"SidebarFooter\"\n\nconst SidebarSeparator = React.forwardRef<\n  React.ElementRef<typeof Separator>,\n  React.ComponentProps<typeof Separator>\n>(({ className, ...props }, ref) => {\n  return (\n    <Separator\n      ref={ref}\n      data-sidebar=\"separator\"\n      className={cn(\"mx-2 w-auto bg-sidebar-border\", className)}\n      {...props}\n    />\n  )\n})\nSidebarSeparator.displayName = \"SidebarSeparator\"\n\nconst SidebarContent = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => {\n  return (\n    <div\n      ref={ref}\n      data-sidebar=\"content\"\n      className={cn(\n        \"flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarContent.displayName = \"SidebarContent\"\n\nconst SidebarGroup = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => {\n  return (\n    <div\n      ref={ref}\n      data-sidebar=\"group\"\n      className={cn(\"relative flex w-full min-w-0 flex-col p-2\", className)}\n      {...props}\n    
/>\n  )\n})\nSidebarGroup.displayName = \"SidebarGroup\"\n\nconst SidebarGroupLabel = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> & { asChild?: boolean }\n>(({ className, asChild = false, ...props }, ref) => {\n  const Comp = asChild ? Slot : \"div\"\n\n  return (\n    <Comp\n      ref={ref}\n      data-sidebar=\"group-label\"\n      className={cn(\n        \"duration-200 flex h-8 shrink-0 items-center rounded-md px-2 text-xs font-medium text-sidebar-foreground/70 outline-none ring-sidebar-ring transition-[margin,opa] ease-linear focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0\",\n        \"group-data-[collapsible=icon]:-mt-8 group-data-[collapsible=icon]:opacity-0\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarGroupLabel.displayName = \"SidebarGroupLabel\"\n\nconst SidebarGroupAction = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<\"button\"> & { asChild?: boolean }\n>(({ className, asChild = false, ...props }, ref) => {\n  const Comp = asChild ? 
Slot : \"button\"\n\n  return (\n    <Comp\n      ref={ref}\n      data-sidebar=\"group-action\"\n      className={cn(\n        \"absolute right-3 top-3.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0\",\n        // Increases the hit area of the button on mobile.\n        \"after:absolute after:-inset-2 after:md:hidden\",\n        \"group-data-[collapsible=icon]:hidden\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarGroupAction.displayName = \"SidebarGroupAction\"\n\nconst SidebarGroupContent = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    data-sidebar=\"group-content\"\n    className={cn(\"w-full text-sm\", className)}\n    {...props}\n  />\n))\nSidebarGroupContent.displayName = \"SidebarGroupContent\"\n\nconst SidebarMenu = React.forwardRef<\n  HTMLUListElement,\n  React.ComponentProps<\"ul\">\n>(({ className, ...props }, ref) => (\n  <ul\n    ref={ref}\n    data-sidebar=\"menu\"\n    className={cn(\"flex w-full min-w-0 flex-col gap-1\", className)}\n    {...props}\n  />\n))\nSidebarMenu.displayName = \"SidebarMenu\"\n\nconst SidebarMenuItem = React.forwardRef<\n  HTMLLIElement,\n  React.ComponentProps<\"li\">\n>(({ className, ...props }, ref) => (\n  <li\n    ref={ref}\n    data-sidebar=\"menu-item\"\n    className={cn(\"group/menu-item relative\", className)}\n    {...props}\n  />\n))\nSidebarMenuItem.displayName = \"SidebarMenuItem\"\n\nconst sidebarMenuButtonVariants = cva(\n  \"peer/menu-button flex w-full items-center gap-2 overflow-hidden rounded-md p-2 text-left text-sm outline-none ring-sidebar-ring transition-[width,height,padding] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 
active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 group-has-[[data-sidebar=menu-action]]/menu-item:pr-8 aria-disabled:pointer-events-none aria-disabled:opacity-50 data-[active=true]:bg-sidebar-accent data-[active=true]:font-medium data-[active=true]:text-sidebar-accent-foreground data-[state=open]:hover:bg-sidebar-accent data-[state=open]:hover:text-sidebar-accent-foreground group-data-[collapsible=icon]:!size-8 group-data-[collapsible=icon]:!p-2 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0\",\n  {\n    variants: {\n      variant: {\n        default: \"hover:bg-sidebar-accent hover:text-sidebar-accent-foreground\",\n        outline:\n          \"bg-background shadow-[0_0_0_1px_hsl(var(--sidebar-border))] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground hover:shadow-[0_0_0_1px_hsl(var(--sidebar-accent))]\",\n      },\n      size: {\n        default: \"h-8 text-sm\",\n        sm: \"h-7 text-xs\",\n        lg: \"h-12 text-sm group-data-[collapsible=icon]:!p-0\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n      size: \"default\",\n    },\n  }\n)\n\nconst SidebarMenuButton = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<\"button\"> & {\n    asChild?: boolean\n    isActive?: boolean\n    tooltip?: string | React.ComponentProps<typeof TooltipContent>\n  } & VariantProps<typeof sidebarMenuButtonVariants>\n>(\n  (\n    {\n      asChild = false,\n      isActive = false,\n      variant = \"default\",\n      size = \"default\",\n      tooltip,\n      className,\n      ...props\n    },\n    ref\n  ) => {\n    const Comp = asChild ? 
Slot : \"button\"\n    const { isMobile, state } = useSidebar()\n\n    const button = (\n      <Comp\n        ref={ref}\n        data-sidebar=\"menu-button\"\n        data-size={size}\n        data-active={isActive}\n        className={cn(sidebarMenuButtonVariants({ variant, size }), className)}\n        {...props}\n      />\n    )\n\n    if (!tooltip) {\n      return button\n    }\n\n    if (typeof tooltip === \"string\") {\n      tooltip = {\n        children: tooltip,\n      }\n    }\n\n    return (\n      <Tooltip>\n        <TooltipTrigger asChild>{button}</TooltipTrigger>\n        <TooltipContent\n          side=\"right\"\n          align=\"center\"\n          hidden={state !== \"collapsed\" || isMobile}\n          {...tooltip}\n        />\n      </Tooltip>\n    )\n  }\n)\nSidebarMenuButton.displayName = \"SidebarMenuButton\"\n\nconst SidebarMenuAction = React.forwardRef<\n  HTMLButtonElement,\n  React.ComponentProps<\"button\"> & {\n    asChild?: boolean\n    showOnHover?: boolean\n  }\n>(({ className, asChild = false, showOnHover = false, ...props }, ref) => {\n  const Comp = asChild ? 
Slot : \"button\"\n\n  return (\n    <Comp\n      ref={ref}\n      data-sidebar=\"menu-action\"\n      className={cn(\n        \"absolute right-1 top-1.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 peer-hover/menu-button:text-sidebar-accent-foreground [&>svg]:size-4 [&>svg]:shrink-0\",\n        // Increases the hit area of the button on mobile.\n        \"after:absolute after:-inset-2 after:md:hidden\",\n        \"peer-data-[size=sm]/menu-button:top-1\",\n        \"peer-data-[size=default]/menu-button:top-1.5\",\n        \"peer-data-[size=lg]/menu-button:top-2.5\",\n        \"group-data-[collapsible=icon]:hidden\",\n        showOnHover &&\n          \"group-focus-within/menu-item:opacity-100 group-hover/menu-item:opacity-100 data-[state=open]:opacity-100 peer-data-[active=true]/menu-button:text-sidebar-accent-foreground md:opacity-0\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarMenuAction.displayName = \"SidebarMenuAction\"\n\nconst SidebarMenuBadge = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\">\n>(({ className, ...props }, ref) => (\n  <div\n    ref={ref}\n    data-sidebar=\"menu-badge\"\n    className={cn(\n      \"absolute right-1 flex h-5 min-w-5 items-center justify-center rounded-md px-1 text-xs font-medium tabular-nums text-sidebar-foreground select-none pointer-events-none\",\n      \"peer-hover/menu-button:text-sidebar-accent-foreground peer-data-[active=true]/menu-button:text-sidebar-accent-foreground\",\n      \"peer-data-[size=sm]/menu-button:top-1\",\n      \"peer-data-[size=default]/menu-button:top-1.5\",\n      \"peer-data-[size=lg]/menu-button:top-2.5\",\n      \"group-data-[collapsible=icon]:hidden\",\n      \"peer-data-[size=md]/menu-sub-button:top-1\",\n      className\n    )}\n    {...props}\n  
/>\n))\nSidebarMenuBadge.displayName = \"SidebarMenuBadge\"\n\nconst SidebarMenuSkeleton = React.forwardRef<\n  HTMLDivElement,\n  React.ComponentProps<\"div\"> & {\n    showIcon?: boolean\n  }\n>(({ className, showIcon = false, ...props }, ref) => {\n  // Random width between 50 to 90%.\n  const width = React.useMemo(() => {\n    return `${Math.floor(Math.random() * 40) + 50}%`\n  }, [])\n\n  return (\n    <div\n      ref={ref}\n      data-sidebar=\"menu-skeleton\"\n      className={cn(\"rounded-md h-8 flex gap-2 px-2 items-center\", className)}\n      {...props}\n    >\n      {showIcon && (\n        <Skeleton\n          className=\"size-4 rounded-md\"\n          data-sidebar=\"menu-skeleton-icon\"\n        />\n      )}\n      <Skeleton\n        className=\"h-4 flex-1 max-w-[--skeleton-width]\"\n        data-sidebar=\"menu-skeleton-text\"\n        style={\n          {\n            \"--skeleton-width\": width,\n          } as React.CSSProperties\n        }\n      />\n    </div>\n  )\n})\nSidebarMenuSkeleton.displayName = \"SidebarMenuSkeleton\"\n\nconst SidebarMenuSub = React.forwardRef<\n  HTMLUListElement,\n  React.ComponentProps<\"ul\">\n>(({ className, ...props }, ref) => (\n  <ul\n    ref={ref}\n    data-sidebar=\"menu-sub\"\n    className={cn(\n      \"mx-3.5 flex min-w-0 translate-x-px flex-col gap-1 border-l border-sidebar-border px-2.5 py-0.5\",\n      \"group-data-[collapsible=icon]:hidden\",\n      className\n    )}\n    {...props}\n  />\n))\nSidebarMenuSub.displayName = \"SidebarMenuSub\"\n\nconst SidebarMenuSubItem = React.forwardRef<\n  HTMLLIElement,\n  React.ComponentProps<\"li\">\n>(({ ...props }, ref) => <li ref={ref} {...props} className={cn('relative', props.className)} />)\nSidebarMenuSubItem.displayName = \"SidebarMenuSubItem\"\n\nconst SidebarMenuSubButton = React.forwardRef<\n  HTMLAnchorElement,\n  React.ComponentProps<\"a\"> & {\n    asChild?: boolean\n    size?: \"sm\" | \"md\"\n    isActive?: boolean\n  }\n>(({ asChild = false, size = 
\"md\", isActive, className, ...props }, ref) => {\n  const Comp = asChild ? Slot : \"a\"\n\n  return (\n    <Comp\n      ref={ref}\n      data-sidebar=\"menu-sub-button\"\n      data-size={size}\n      data-active={isActive}\n      className={cn(\n        \"peer/menu-sub-button\",\n        \"flex h-7 min-w-0 -translate-x-px items-center gap-2 overflow-hidden rounded-md px-2 text-sidebar-foreground outline-none ring-sidebar-ring hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 aria-disabled:pointer-events-none aria-disabled:opacity-50 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0 [&>svg]:text-sidebar-accent-foreground\",\n        \"data-[active=true]:bg-sidebar-accent data-[active=true]:text-sidebar-accent-foreground\",\n        size === \"sm\" && \"text-xs\",\n        size === \"md\" && \"text-sm\",\n        \"group-data-[collapsible=icon]:hidden\",\n        className\n      )}\n      {...props}\n    />\n  )\n})\nSidebarMenuSubButton.displayName = \"SidebarMenuSubButton\"\n\nexport {\n  Sidebar,\n  SidebarContent,\n  SidebarFooter,\n  SidebarGroup,\n  SidebarGroupAction,\n  SidebarGroupContent,\n  SidebarGroupLabel,\n  SidebarHeader,\n  SidebarInput,\n  SidebarInset,\n  SidebarMenu,\n  SidebarMenuAction,\n  SidebarMenuBadge,\n  SidebarMenuButton,\n  SidebarMenuItem,\n  SidebarMenuSkeleton,\n  SidebarMenuSub,\n  SidebarMenuSubButton,\n  SidebarMenuSubItem,\n  SidebarProvider,\n  SidebarRail,\n  SidebarSeparator,\n  SidebarTrigger,\n  useSidebar,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/skeleton.tsx",
    "content": "import { cn } from \"@/lib/utils\"\n\nfunction Skeleton({\n  className,\n  ...props\n}: React.HTMLAttributes<HTMLDivElement>) {\n  return (\n    <div\n      className={cn(\"animate-pulse rounded-md bg-muted\", className)}\n      {...props}\n    />\n  )\n}\n\nexport { Skeleton }\n"
  },
  {
    "path": "frontend/app/src/components/ui/slider.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as SliderPrimitive from \"@radix-ui/react-slider\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Slider = React.forwardRef<\n  React.ElementRef<typeof SliderPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof SliderPrimitive.Root>\n>(({ className, ...props }, ref) => (\n  <SliderPrimitive.Root\n    ref={ref}\n    className={cn(\n      \"relative flex w-full touch-none select-none items-center\",\n      className\n    )}\n    {...props}\n  >\n    <SliderPrimitive.Track className=\"relative h-2 w-full grow overflow-hidden rounded-full bg-secondary\">\n      <SliderPrimitive.Range className=\"absolute h-full bg-primary\" />\n    </SliderPrimitive.Track>\n    <SliderPrimitive.Thumb className=\"block h-5 w-5 rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50\" />\n  </SliderPrimitive.Root>\n))\nSlider.displayName = SliderPrimitive.Root.displayName\n\nexport { Slider }\n"
  },
  {
    "path": "frontend/app/src/components/ui/sonner.tsx",
    "content": "\"use client\"\n\nimport { useTheme } from \"next-themes\"\nimport { Toaster as Sonner } from \"sonner\"\n\ntype ToasterProps = React.ComponentProps<typeof Sonner>\n\nconst Toaster = ({ ...props }: ToasterProps) => {\n  const { theme = \"system\" } = useTheme()\n\n  return (\n    <Sonner\n      theme={theme as ToasterProps[\"theme\"]}\n      className=\"toaster group\"\n      toastOptions={{\n        classNames: {\n          toast:\n            \"group toast group-[.toaster]:bg-background group-[.toaster]:text-foreground group-[.toaster]:border-border group-[.toaster]:shadow-lg\",\n          description: \"group-[.toast]:text-muted-foreground group-[.toast.success]:text-success-foreground/70 group-[.toast.warning]:text-warning-foreground/70 group-[.toast.info]:text-info-foreground/70 group-[.toast.error]:text-destructive-foreground/70\",\n          success: 'success group-[.toaster]:bg-success group-[.toaster]:text-success-foreground group-[.toaster]:border-success-foreground/30',\n          warning: 'warning group-[.toaster]:bg-warning group-[.toaster]:text-warning-foreground group-[.toaster]:border-warning-foreground/30',\n          info: 'info group-[.toaster]:bg-info group-[.toaster]:text-info-foreground group-[.toaster]:border-info-foreground/30',\n          error: 'error group-[.toaster]:bg-destructive group-[.toaster]:text-destructive-foreground group-[.toaster]:border-destructive-foreground/30',\n          actionButton:\n            \"group-[.toast]:bg-primary group-[.toast]:text-primary-foreground\",\n          cancelButton:\n            \"group-[.toast]:bg-muted group-[.toast]:text-muted-foreground\",\n        },\n      }}\n      {...props}\n    />\n  )\n}\n\nexport { Toaster }\n"
  },
  {
    "path": "frontend/app/src/components/ui/switch.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as SwitchPrimitives from \"@radix-ui/react-switch\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Switch = React.forwardRef<\n  React.ElementRef<typeof SwitchPrimitives.Root>,\n  React.ComponentPropsWithoutRef<typeof SwitchPrimitives.Root>\n>(({ className, ...props }, ref) => (\n  <SwitchPrimitives.Root\n    className={cn(\n      \"peer inline-flex h-6 w-11 shrink-0 cursor-pointer items-center rounded-full border-2 border-transparent transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-primary data-[state=unchecked]:bg-input\",\n      className\n    )}\n    {...props}\n    ref={ref}\n  >\n    <SwitchPrimitives.Thumb\n      className={cn(\n        \"pointer-events-none block h-5 w-5 rounded-full bg-background shadow-lg ring-0 transition-transform data-[state=checked]:translate-x-5 data-[state=unchecked]:translate-x-0\"\n      )}\n    />\n  </SwitchPrimitives.Root>\n))\nSwitch.displayName = SwitchPrimitives.Root.displayName\n\nexport { Switch }\n"
  },
  {
    "path": "frontend/app/src/components/ui/table.tsx",
    "content": "import * as React from \"react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Table = React.forwardRef<\n  HTMLTableElement,\n  React.HTMLAttributes<HTMLTableElement>\n>(({ className, ...props }, ref) => (\n  <div className=\"relative w-full overflow-auto\">\n    <table\n      ref={ref}\n      className={cn(\"w-full caption-bottom text-sm\", className)}\n      {...props}\n    />\n  </div>\n))\nTable.displayName = \"Table\"\n\nconst TableHeader = React.forwardRef<\n  HTMLTableSectionElement,\n  React.HTMLAttributes<HTMLTableSectionElement>\n>(({ className, ...props }, ref) => (\n  <thead ref={ref} className={cn(\"[&_tr]:border-b\", className)} {...props} />\n))\nTableHeader.displayName = \"TableHeader\"\n\nconst TableBody = React.forwardRef<\n  HTMLTableSectionElement,\n  React.HTMLAttributes<HTMLTableSectionElement>\n>(({ className, ...props }, ref) => (\n  <tbody\n    ref={ref}\n    className={cn(\"[&_tr:last-child]:border-0\", className)}\n    {...props}\n  />\n))\nTableBody.displayName = \"TableBody\"\n\nconst TableFooter = React.forwardRef<\n  HTMLTableSectionElement,\n  React.HTMLAttributes<HTMLTableSectionElement>\n>(({ className, ...props }, ref) => (\n  <tfoot\n    ref={ref}\n    className={cn(\n      \"border-t bg-muted/50 font-medium [&>tr]:last:border-b-0\",\n      className\n    )}\n    {...props}\n  />\n))\nTableFooter.displayName = \"TableFooter\"\n\nconst TableRow = React.forwardRef<\n  HTMLTableRowElement,\n  React.HTMLAttributes<HTMLTableRowElement>\n>(({ className, ...props }, ref) => (\n  <tr\n    ref={ref}\n    className={cn(\n      \"border-b transition-colors hover:bg-muted/50 data-[state=selected]:bg-muted\",\n      className\n    )}\n    {...props}\n  />\n))\nTableRow.displayName = \"TableRow\"\n\nconst TableHead = React.forwardRef<\n  HTMLTableCellElement,\n  React.ThHTMLAttributes<HTMLTableCellElement>\n>(({ className, ...props }, ref) => (\n  <th\n    ref={ref}\n    className={cn(\n      \"h-12 px-4 text-left 
align-middle font-medium text-muted-foreground [&:has([role=checkbox])]:pr-0\",\n      className\n    )}\n    {...props}\n  />\n))\nTableHead.displayName = \"TableHead\"\n\nconst TableCell = React.forwardRef<\n  HTMLTableCellElement,\n  React.TdHTMLAttributes<HTMLTableCellElement>\n>(({ className, ...props }, ref) => (\n  <td\n    ref={ref}\n    className={cn(\"p-4 align-middle [&:has([role=checkbox])]:pr-0\", className)}\n    {...props}\n  />\n))\nTableCell.displayName = \"TableCell\"\n\nconst TableCaption = React.forwardRef<\n  HTMLTableCaptionElement,\n  React.HTMLAttributes<HTMLTableCaptionElement>\n>(({ className, ...props }, ref) => (\n  <caption\n    ref={ref}\n    className={cn(\"mt-4 text-sm text-muted-foreground\", className)}\n    {...props}\n  />\n))\nTableCaption.displayName = \"TableCaption\"\n\nexport {\n  Table,\n  TableHeader,\n  TableBody,\n  TableFooter,\n  TableHead,\n  TableRow,\n  TableCell,\n  TableCaption,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/tabs.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as TabsPrimitive from \"@radix-ui/react-tabs\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst Tabs = TabsPrimitive.Root\n\nconst TabsList = React.forwardRef<\n  React.ElementRef<typeof TabsPrimitive.List>,\n  React.ComponentPropsWithoutRef<typeof TabsPrimitive.List>\n>(({ className, ...props }, ref) => (\n  <TabsPrimitive.List\n    ref={ref}\n    className={cn(\n      \"inline-flex h-10 items-center justify-center rounded-md bg-muted p-1 text-muted-foreground\",\n      className\n    )}\n    {...props}\n  />\n))\nTabsList.displayName = TabsPrimitive.List.displayName\n\nconst TabsTrigger = React.forwardRef<\n  React.ElementRef<typeof TabsPrimitive.Trigger>,\n  React.ComponentPropsWithoutRef<typeof TabsPrimitive.Trigger>\n>(({ className, ...props }, ref) => (\n  <TabsPrimitive.Trigger\n    ref={ref}\n    className={cn(\n      \"inline-flex items-center justify-center whitespace-nowrap rounded-sm px-3 py-1.5 text-sm font-medium ring-offset-background transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:bg-background data-[state=active]:text-foreground data-[state=active]:shadow-sm\",\n      className\n    )}\n    {...props}\n  />\n))\nTabsTrigger.displayName = TabsPrimitive.Trigger.displayName\n\nconst TabsContent = React.forwardRef<\n  React.ElementRef<typeof TabsPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof TabsPrimitive.Content>\n>(({ className, ...props }, ref) => (\n  <TabsPrimitive.Content\n    ref={ref}\n    className={cn(\n      \"mt-2 ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2\",\n      className\n    )}\n    {...props}\n  />\n))\nTabsContent.displayName = TabsPrimitive.Content.displayName\n\nexport { Tabs, TabsList, TabsTrigger, TabsContent }\n"
  },
  {
    "path": "frontend/app/src/components/ui/textarea.tsx",
    "content": "import * as React from \"react\"\n\nimport { cn } from \"@/lib/utils\"\n\nexport interface TextareaProps\n  extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {}\n\nconst Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps>(\n  ({ className, ...props }, ref) => {\n    return (\n      <textarea\n        className={cn(\n          \"flex min-h-[80px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50\",\n          className\n        )}\n        ref={ref}\n        {...props}\n      />\n    )\n  }\n)\nTextarea.displayName = \"Textarea\"\n\nexport { Textarea }\n"
  },
  {
    "path": "frontend/app/src/components/ui/toast.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as ToastPrimitives from \"@radix-ui/react-toast\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\nimport { X } from \"lucide-react\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst ToastProvider = ToastPrimitives.Provider\n\nconst ToastViewport = React.forwardRef<\n  React.ElementRef<typeof ToastPrimitives.Viewport>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Viewport>\n>(({ className, ...props }, ref) => (\n  <ToastPrimitives.Viewport\n    ref={ref}\n    className={cn(\n      \"fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:bottom-0 sm:right-0 sm:top-auto sm:flex-col md:max-w-[420px]\",\n      className\n    )}\n    {...props}\n  />\n))\nToastViewport.displayName = ToastPrimitives.Viewport.displayName\n\nconst toastVariants = cva(\n  \"group pointer-events-auto relative flex w-full items-center justify-between space-x-4 overflow-hidden rounded-md border p-6 pr-8 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full\",\n  {\n    variants: {\n      variant: {\n        default: \"border bg-background text-foreground\",\n        destructive:\n          \"destructive group border-destructive bg-destructive text-destructive-foreground\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n    },\n  }\n)\n\nconst Toast = React.forwardRef<\n  React.ElementRef<typeof ToastPrimitives.Root>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Root> &\n    VariantProps<typeof toastVariants>\n>(({ className, 
variant, ...props }, ref) => {\n  return (\n    <ToastPrimitives.Root\n      ref={ref}\n      className={cn(toastVariants({ variant }), className)}\n      {...props}\n    />\n  )\n})\nToast.displayName = ToastPrimitives.Root.displayName\n\nconst ToastAction = React.forwardRef<\n  React.ElementRef<typeof ToastPrimitives.Action>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Action>\n>(({ className, ...props }, ref) => (\n  <ToastPrimitives.Action\n    ref={ref}\n    className={cn(\n      \"inline-flex h-8 shrink-0 items-center justify-center rounded-md border bg-transparent px-3 text-sm font-medium ring-offset-background transition-colors hover:bg-secondary focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 group-[.destructive]:border-muted/40 group-[.destructive]:hover:border-destructive/30 group-[.destructive]:hover:bg-destructive group-[.destructive]:hover:text-destructive-foreground group-[.destructive]:focus:ring-destructive\",\n      className\n    )}\n    {...props}\n  />\n))\nToastAction.displayName = ToastPrimitives.Action.displayName\n\nconst ToastClose = React.forwardRef<\n  React.ElementRef<typeof ToastPrimitives.Close>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Close>\n>(({ className, ...props }, ref) => (\n  <ToastPrimitives.Close\n    ref={ref}\n    className={cn(\n      \"absolute right-2 top-2 rounded-md p-1 text-foreground/50 opacity-0 transition-opacity hover:text-foreground focus:opacity-100 focus:outline-none focus:ring-2 group-hover:opacity-100 group-[.destructive]:text-red-300 group-[.destructive]:hover:text-red-50 group-[.destructive]:focus:ring-red-400 group-[.destructive]:focus:ring-offset-red-600\",\n      className\n    )}\n    toast-close=\"\"\n    {...props}\n  >\n    <X className=\"h-4 w-4\" />\n  </ToastPrimitives.Close>\n))\nToastClose.displayName = ToastPrimitives.Close.displayName\n\nconst ToastTitle = React.forwardRef<\n  
React.ElementRef<typeof ToastPrimitives.Title>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Title>\n>(({ className, ...props }, ref) => (\n  <ToastPrimitives.Title\n    ref={ref}\n    className={cn(\"text-sm font-semibold\", className)}\n    {...props}\n  />\n))\nToastTitle.displayName = ToastPrimitives.Title.displayName\n\nconst ToastDescription = React.forwardRef<\n  React.ElementRef<typeof ToastPrimitives.Description>,\n  React.ComponentPropsWithoutRef<typeof ToastPrimitives.Description>\n>(({ className, ...props }, ref) => (\n  <ToastPrimitives.Description\n    ref={ref}\n    className={cn(\"text-sm opacity-90\", className)}\n    {...props}\n  />\n))\nToastDescription.displayName = ToastPrimitives.Description.displayName\n\ntype ToastProps = React.ComponentPropsWithoutRef<typeof Toast>\n\ntype ToastActionElement = React.ReactElement<typeof ToastAction>\n\nexport {\n  type ToastProps,\n  type ToastActionElement,\n  ToastProvider,\n  ToastViewport,\n  Toast,\n  ToastTitle,\n  ToastDescription,\n  ToastClose,\n  ToastAction,\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/toaster.tsx",
    "content": "\"use client\"\n\nimport {\n  Toast,\n  ToastClose,\n  ToastDescription,\n  ToastProvider,\n  ToastTitle,\n  ToastViewport,\n} from \"@/components/ui/toast\"\nimport { useToast } from \"@/components/ui/use-toast\"\n\nexport function Toaster() {\n  const { toasts } = useToast()\n\n  return (\n    <ToastProvider>\n      {toasts.map(function ({ id, title, description, action, ...props }) {\n        return (\n          <Toast key={id} {...props}>\n            <div className=\"grid gap-1\">\n              {title && <ToastTitle>{title}</ToastTitle>}\n              {description && (\n                <ToastDescription>{description}</ToastDescription>\n              )}\n            </div>\n            {action}\n            <ToastClose />\n          </Toast>\n        )\n      })}\n      <ToastViewport />\n    </ToastProvider>\n  )\n}\n"
  },
  {
    "path": "frontend/app/src/components/ui/toggle-group.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as ToggleGroupPrimitive from \"@radix-ui/react-toggle-group\"\nimport { type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\nimport { toggleVariants } from \"@/components/ui/toggle\"\n\nconst ToggleGroupContext = React.createContext<\n  VariantProps<typeof toggleVariants>\n>({\n  size: \"default\",\n  variant: \"default\",\n})\n\nconst ToggleGroup = React.forwardRef<\n  React.ElementRef<typeof ToggleGroupPrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof ToggleGroupPrimitive.Root> &\n    VariantProps<typeof toggleVariants>\n>(({ className, variant, size, children, ...props }, ref) => (\n  <ToggleGroupPrimitive.Root\n    ref={ref}\n    className={cn(\"flex items-center justify-center gap-1\", className)}\n    {...props}\n  >\n    <ToggleGroupContext.Provider value={{ variant, size }}>\n      {children}\n    </ToggleGroupContext.Provider>\n  </ToggleGroupPrimitive.Root>\n))\n\nToggleGroup.displayName = ToggleGroupPrimitive.Root.displayName\n\nconst ToggleGroupItem = React.forwardRef<\n  React.ElementRef<typeof ToggleGroupPrimitive.Item>,\n  React.ComponentPropsWithoutRef<typeof ToggleGroupPrimitive.Item> &\n    VariantProps<typeof toggleVariants>\n>(({ className, children, variant, size, ...props }, ref) => {\n  const context = React.useContext(ToggleGroupContext)\n\n  return (\n    <ToggleGroupPrimitive.Item\n      ref={ref}\n      className={cn(\n        toggleVariants({\n          variant: context.variant || variant,\n          size: context.size || size,\n        }),\n        className\n      )}\n      {...props}\n    >\n      {children}\n    </ToggleGroupPrimitive.Item>\n  )\n})\n\nToggleGroupItem.displayName = ToggleGroupPrimitive.Item.displayName\n\nexport { ToggleGroup, ToggleGroupItem }\n"
  },
  {
    "path": "frontend/app/src/components/ui/toggle.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as TogglePrimitive from \"@radix-ui/react-toggle\"\nimport { cva, type VariantProps } from \"class-variance-authority\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst toggleVariants = cva(\n  \"inline-flex items-center justify-center rounded-md text-sm font-medium ring-offset-background transition-colors hover:bg-muted hover:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=on]:bg-accent data-[state=on]:text-accent-foreground\",\n  {\n    variants: {\n      variant: {\n        default: \"bg-transparent\",\n        outline:\n          \"border border-input bg-transparent hover:bg-accent hover:text-accent-foreground\",\n      },\n      size: {\n        default: \"h-10 px-3\",\n        sm: \"h-9 px-2.5\",\n        lg: \"h-11 px-5\",\n      },\n    },\n    defaultVariants: {\n      variant: \"default\",\n      size: \"default\",\n    },\n  }\n)\n\nconst Toggle = React.forwardRef<\n  React.ElementRef<typeof TogglePrimitive.Root>,\n  React.ComponentPropsWithoutRef<typeof TogglePrimitive.Root> &\n    VariantProps<typeof toggleVariants>\n>(({ className, variant, size, ...props }, ref) => (\n  <TogglePrimitive.Root\n    ref={ref}\n    className={cn(toggleVariants({ variant, size, className }))}\n    {...props}\n  />\n))\n\nToggle.displayName = TogglePrimitive.Root.displayName\n\nexport { Toggle, toggleVariants }\n"
  },
  {
    "path": "frontend/app/src/components/ui/tooltip.tsx",
    "content": "\"use client\"\n\nimport * as React from \"react\"\nimport * as TooltipPrimitive from \"@radix-ui/react-tooltip\"\n\nimport { cn } from \"@/lib/utils\"\n\nconst TooltipProvider = TooltipPrimitive.Provider\n\nconst Tooltip = TooltipPrimitive.Root\n\nconst TooltipTrigger = TooltipPrimitive.Trigger\n\nconst TooltipPortal = TooltipPrimitive.Portal\n\nconst TooltipContent = React.forwardRef<\n  React.ElementRef<typeof TooltipPrimitive.Content>,\n  React.ComponentPropsWithoutRef<typeof TooltipPrimitive.Content>\n>(({ className, sideOffset = 4, ...props }, ref) => (\n  <TooltipPrimitive.Content\n    ref={ref}\n    sideOffset={sideOffset}\n    className={cn(\n      \"z-50 overflow-hidden rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2\",\n      className\n    )}\n    {...props}\n  />\n))\nTooltipContent.displayName = TooltipPrimitive.Content.displayName\n\nexport { Tooltip, TooltipTrigger, TooltipPortal, TooltipContent, TooltipProvider }\n"
  },
  {
    "path": "frontend/app/src/components/ui/use-toast.ts",
    "content": "\"use client\"\n\n// Inspired by react-hot-toast library\nimport * as React from \"react\"\n\nimport type {\n  ToastActionElement,\n  ToastProps,\n} from \"@/components/ui/toast\"\n\nconst TOAST_LIMIT = 1\nconst TOAST_REMOVE_DELAY = 1000000\n\ntype ToasterToast = ToastProps & {\n  id: string\n  title?: React.ReactNode\n  description?: React.ReactNode\n  action?: ToastActionElement\n}\n\nconst actionTypes = {\n  ADD_TOAST: \"ADD_TOAST\",\n  UPDATE_TOAST: \"UPDATE_TOAST\",\n  DISMISS_TOAST: \"DISMISS_TOAST\",\n  REMOVE_TOAST: \"REMOVE_TOAST\",\n} as const\n\nlet count = 0\n\nfunction genId() {\n  count = (count + 1) % Number.MAX_SAFE_INTEGER\n  return count.toString()\n}\n\ntype ActionType = typeof actionTypes\n\ntype Action =\n  | {\n      type: ActionType[\"ADD_TOAST\"]\n      toast: ToasterToast\n    }\n  | {\n      type: ActionType[\"UPDATE_TOAST\"]\n      toast: Partial<ToasterToast>\n    }\n  | {\n      type: ActionType[\"DISMISS_TOAST\"]\n      toastId?: ToasterToast[\"id\"]\n    }\n  | {\n      type: ActionType[\"REMOVE_TOAST\"]\n      toastId?: ToasterToast[\"id\"]\n    }\n\ninterface State {\n  toasts: ToasterToast[]\n}\n\nconst toastTimeouts = new Map<string, ReturnType<typeof setTimeout>>()\n\nconst addToRemoveQueue = (toastId: string) => {\n  if (toastTimeouts.has(toastId)) {\n    return\n  }\n\n  const timeout = setTimeout(() => {\n    toastTimeouts.delete(toastId)\n    dispatch({\n      type: \"REMOVE_TOAST\",\n      toastId: toastId,\n    })\n  }, TOAST_REMOVE_DELAY)\n\n  toastTimeouts.set(toastId, timeout)\n}\n\nexport const reducer = (state: State, action: Action): State => {\n  switch (action.type) {\n    case \"ADD_TOAST\":\n      return {\n        ...state,\n        toasts: [action.toast, ...state.toasts].slice(0, TOAST_LIMIT),\n      }\n\n    case \"UPDATE_TOAST\":\n      return {\n        ...state,\n        toasts: state.toasts.map((t) =>\n          t.id === action.toast.id ? 
{ ...t, ...action.toast } : t\n        ),\n      }\n\n    case \"DISMISS_TOAST\": {\n      const { toastId } = action\n\n      // ! Side effects ! - This could be extracted into a dismissToast() action,\n      // but I'll keep it here for simplicity\n      if (toastId) {\n        addToRemoveQueue(toastId)\n      } else {\n        state.toasts.forEach((toast) => {\n          addToRemoveQueue(toast.id)\n        })\n      }\n\n      return {\n        ...state,\n        toasts: state.toasts.map((t) =>\n          t.id === toastId || toastId === undefined\n            ? {\n                ...t,\n                open: false,\n              }\n            : t\n        ),\n      }\n    }\n    case \"REMOVE_TOAST\":\n      if (action.toastId === undefined) {\n        return {\n          ...state,\n          toasts: [],\n        }\n      }\n      return {\n        ...state,\n        toasts: state.toasts.filter((t) => t.id !== action.toastId),\n      }\n  }\n}\n\nconst listeners: Array<(state: State) => void> = []\n\nlet memoryState: State = { toasts: [] }\n\nfunction dispatch(action: Action) {\n  memoryState = reducer(memoryState, action)\n  listeners.forEach((listener) => {\n    listener(memoryState)\n  })\n}\n\ntype Toast = Omit<ToasterToast, \"id\">\n\nfunction toast({ ...props }: Toast) {\n  const id = genId()\n\n  const update = (props: ToasterToast) =>\n    dispatch({\n      type: \"UPDATE_TOAST\",\n      toast: { ...props, id },\n    })\n  const dismiss = () => dispatch({ type: \"DISMISS_TOAST\", toastId: id })\n\n  dispatch({\n    type: \"ADD_TOAST\",\n    toast: {\n      ...props,\n      id,\n      open: true,\n      onOpenChange: (open) => {\n        if (!open) dismiss()\n      },\n    },\n  })\n\n  return {\n    id: id,\n    dismiss,\n    update,\n  }\n}\n\nfunction useToast() {\n  const [state, setState] = React.useState<State>(memoryState)\n\n  React.useEffect(() => {\n    listeners.push(setState)\n    return () => {\n      const index = 
listeners.indexOf(setState)\n      if (index > -1) {\n        listeners.splice(index, 1)\n      }\n    }\n  }, [state])\n\n  return {\n    ...state,\n    toast,\n    dismiss: (toastId?: string) => dispatch({ type: \"DISMISS_TOAST\", toastId }),\n  }\n}\n\nexport { useToast, toast }\n"
  },
  {
    "path": "frontend/app/src/components/use-active-theme.ts",
    "content": "import { useTheme } from 'next-themes';\n\n/**\n * Respect 'system' and returns current style. Default to 'light'.\n */\nexport function useActiveTheme () {\n  const { theme, systemTheme } = useTheme();\n  const selectedTheme = theme === 'system' ? systemTheme : theme;\n\n  return selectedTheme === 'dark' ? 'dark' : 'light';\n}\n\nexport type AvailableTheme = ReturnType<typeof useActiveTheme>;"
  },
  {
    "path": "frontend/app/src/components/use-data-table.ts",
    "content": "'use client';\n\nimport type { Table } from '@tanstack/table-core';\nimport { createContext, useContext } from 'react';\n\nconst DataTableContext = createContext<Table<any> & {\n  reload?: () => void\n  loading?: boolean\n} | null>(null);\n\nexport const DataTableProvider = DataTableContext.Provider;\n\nexport const DataTableConsumer = DataTableContext.Consumer;\n\nexport function useDataTable () {\n  const table = useContext(DataTableContext);\n\n  if (!table) {\n    throw new Error('useDataTable must be called inside a DataTable');\n  }\n\n  return table;\n}\n"
  },
  {
    "path": "frontend/app/src/components/use-href.ts",
    "content": "import { usePathname, useSearchParams } from 'next/navigation';\nimport { useMemo } from 'react';\n\nconst EMPTY = new URLSearchParams();\n\nexport function useHref () {\n  const pathname = usePathname() ?? '';\n  const searchParams = useSearchParams() ?? EMPTY;\n\n  return useMemo(() => {\n    const search = searchParams.toString()\n    if (search) {\n      return `${pathname}?${search}`\n    } else {\n      return pathname;\n    }\n  }, [pathname, searchParams])\n}\n"
  },
  {
    "path": "frontend/app/src/components/use-latest-ref.tsx",
    "content": "import { useEffect, useRef } from 'react';\n\nexport function useLatestRef<T> (value: T) {\n  const ref = useRef(value);\n  useEffect(() => {\n    ref.current = value;\n  });\n\n  return ref;\n}\n"
  },
  {
    "path": "frontend/app/src/components/use-search-param.ts",
    "content": "import { usePathname, useRouter, useSearchParams } from 'next/navigation';\n\nexport function useSearchParam (key: string, defaultValue: string | null = null) {\n  const pathname = usePathname();\n  const searchParams = useSearchParams();\n  const router = useRouter();\n\n  const state = searchParams?.get(key);\n  const setState = (newValue: string | null) => {\n    const usp = new URLSearchParams(searchParams ?? {});\n    if (newValue == null) {\n      usp.delete(key);\n    } else {\n      usp.set(key, newValue);\n    }\n    router.push(pathname + '?' + usp.toString());\n  };\n\n  return [state ?? defaultValue, setState] as const;\n}"
  },
  {
    "path": "frontend/app/src/components/use-size.ts",
    "content": "import { useLayoutEffect, useRef, useState } from 'react';\n\nexport function useSize () {\n  const [size, setSize] = useState<DOMRectReadOnly | undefined>(undefined);\n  const ref = useRef<HTMLDivElement>(null);\n  useLayoutEffect(() => {\n    const el = ref.current;\n    if (el) {\n      const ro = new ResizeObserver(() => {\n        setSize(el.getBoundingClientRect());\n      });\n\n      setSize(el.getBoundingClientRect());\n      ro.observe(el);\n      ro.observe(document.documentElement);\n\n      // observe sidebar inset\n      for (let main of Array.from(document.getElementsByTagName('main'))) {\n        ro.observe(main);\n      }\n\n      return () => {\n        ro.disconnect();\n      };\n    }\n  }, []);\n\n  return {\n    ref, size,\n  };\n}"
  },
  {
    "path": "frontend/app/src/components/website-setting-provider.tsx",
    "content": "'use client';\n\nimport type { PublicWebsiteSettings } from '@/api/site-settings';\nimport { createContext, useContext } from 'react';\n\nexport const SettingContext = createContext<PublicWebsiteSettings>(null as any);\nexport const SettingProvider = SettingContext.Provider;\nexport const useSettingContext = () => {\n  return useContext(SettingContext);\n};\n"
  },
  {
    "path": "frontend/app/src/core/schema/NOTICE.md",
    "content": "# Note\n\nThis directory was deprecated.\n"
  },
  {
    "path": "frontend/app/src/core/schema/settings/security.ts",
    "content": "import { z } from 'zod';\n\nexport const reCaptchas = z.enum(['', 'v3', 'enterprise']);\n\nexport const SecuritySetting = z.object({\n  google_recaptcha: reCaptchas,\n  google_recaptcha_site_key: z.string().optional(),\n  google_recaptcha_secret_key: z.string().optional(),\n  google_recaptcha_enterprise_project_id: z.string().optional(),\n});\n\nexport const SecuritySettingResult = SecuritySetting.partial();\nexport type ISecuritySettingResult = z.infer<typeof SecuritySettingResult>;\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/api.mock.ts",
    "content": "import { fn } from '@storybook/test';\nimport * as api from './api';\n\nexport * from './api';\nexport const getVerify = fn(api.getVerify).mockName('getVerify');\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/api.react-server.ts",
    "content": "throw new Error('unavailable in react-server');\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/api.tidbai-widget.ts",
    "content": "export * from './api';\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/api.ts",
    "content": "import { handleResponse } from '@/lib/request';\nimport { z, type ZodType } from 'zod';\n\nexport const enum VerifyStatus {\n  CREATED = 'CREATED',\n  EXTRACTING = 'EXTRACTING',\n  VALIDATING = 'VALIDATING',\n  SUCCESS = 'SUCCESS',\n  FAILED = 'FAILED',\n  SKIPPED = 'SKIPPED'\n}\n\nexport interface MessageVerifyResponse {\n  status: VerifyStatus;\n  message?: string | null;\n  runs_report: string | null;\n}\n\nexport namespace MessageVerifyResponse {\n  export type Run = {\n    sql: string\n    explanation: string\n    success: boolean\n    results?: any[][]\n    sql_error_code?: number | null\n    sql_error_message?: string | null\n    llm_verification?: string | null\n    warnings?: string[]\n  }\n}\n\nconst getVerifyResponse = z.object({\n  status: z.enum([VerifyStatus.CREATED, VerifyStatus.EXTRACTING, VerifyStatus.VALIDATING, VerifyStatus.SUCCESS, VerifyStatus.FAILED, VerifyStatus.SKIPPED]),\n  message: z.string().nullish(),\n  runs_report: z.string().nullable(),\n}) satisfies ZodType<MessageVerifyResponse, any, any>;\n\nexport async function getVerify (url: string) {\n  return await fetch(url).then(handleResponse(getVerifyResponse));\n}\n\nexport function isFinalVerifyState (state: VerifyStatus) {\n  return [VerifyStatus.SUCCESS, VerifyStatus.FAILED, VerifyStatus.SKIPPED].includes(state);\n}\n\nexport function isVisibleVerifyState (state: VerifyStatus) {\n  return [VerifyStatus.SUCCESS, VerifyStatus.FAILED].includes(state);\n}\n\nfunction assertEnabled (value: string | undefined): asserts value is string {\n  if (!value) {\n    throw new Error('Experimental message verify not enabled.');\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/message-verify-result-markdown.tsx",
    "content": "import { rehypeHighlightOptions } from '@/components/remark-content/highlight';\nimport { cn } from '@/lib/utils';\nimport { capitalCase } from 'change-case-all';\nimport { h } from 'hastscript';\nimport { CheckIcon, XIcon } from 'lucide-react';\nimport type { Node, Parent, Root } from 'mdast';\nimport type { ReactNode } from 'react';\nimport * as jsxRuntime from 'react/jsx-runtime';\nimport rehypeHighlight from 'rehype-highlight';\nimport rehypeReact from 'rehype-react';\nimport remarkDirective from 'remark-directive';\nimport remarkGfm from 'remark-gfm';\nimport remarkParse from 'remark-parse';\nimport remarkRehype from 'remark-rehype';\nimport { format } from 'sql-formatter';\nimport { unified } from 'unified';\nimport { visit } from 'unist-util-visit';\nimport '@/components/remark-content/style.scss';\n\nexport function MessageVerifyResultMarkdown ({ content }: { content: string }) {\n  return (\n    <article className=\"remark-content prose prose-sm dark:prose-invert p-4\" style={{ maxWidth: 'inherit' }}>\n      {processor.processSync(content).result}\n    </article>\n  );\n}\n\nconst processor = unified()\n  .use(remarkParse)\n  .use(remarkGfm)\n  .use(formatSqlPlugin)\n  .use(remarkDirective)\n  .use(handleDirectivePlugin)\n  .use(remarkRehype)\n  .use(rehypeHighlight, rehypeHighlightOptions)\n  .use(rehypeReact, {\n    ...jsxRuntime as any,\n    components: {\n      MessageVerifyResult: ({ children, label, success }: { children: ReactNode, label: string, success: boolean }) => {\n        return (\n          <div className={cn('not-prose whitespace-break-spaces p-2 rounded text-xs', success ? 'bg-success/10' : 'bg-destructive/10')}>\n            <div className={cn('mb-2 flex gap-1 items-center', success ? 'text-success' : 'text-destructive')}>\n              {success ? 
<CheckIcon className=\"size-3\" /> : <XIcon className=\"size-3\" />}\n              <span>{label}</span>\n            </div>\n            {children}\n          </div>\n        );\n      },\n      Void: () => {\n        return null;\n      },\n    },\n  })\n  .freeze();\n\ndeclare module 'mdast' {\n  interface BlockContentMap {\n    containerDirective: ContainerDirective;\n  }\n\n  interface PhrasingContentMap {\n    leafDirective: LeafDirective;\n    textDirective: TextDirective;\n  }\n}\n\ninterface ContainerDirective extends Parent {\n  type: 'containerDirective';\n  name: string;\n  attributes: Record<string, string>;\n}\n\ninterface LeafDirective extends Node {\n  type: 'leafDirective';\n  name: string;\n  attributes: Record<string, string>;\n}\n\ninterface TextDirective extends Node {\n  type: 'textDirective';\n  name: string;\n  attributes: Record<string, string>;\n}\n\nfunction formatSqlPlugin () {\n  return function (tree: Root) {\n    visit(tree, function (node) {\n      if (node.type === 'code') {\n        if (node.lang === 'sql') {\n          try {\n            node.value = format(node.value, { language: 'mysql' });\n          } catch {\n          }\n        }\n      }\n    });\n  };\n}\n\nfunction handleDirectivePlugin () {\n  return function (tree: Root) {\n    visit(tree, function (node) {\n      if (\n        node.type === 'containerDirective'\n      ) {\n        const data = node.data || (node.data = {});\n        const hast = h(node.name, { ...node.attributes, label: capitalCase(node.name), success: node.name === 'success' });\n\n        data.hName = 'MessageVerifyResult';\n        data.hProperties = hast.properties;\n      }\n      if (\n        node.type === 'leafDirective' ||\n        node.type === 'textDirective'\n      ) {\n        const data = node.data || (node.data = {});\n        data.hName = 'Void';\n        data.hProperties = {};\n      }\n    });\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/message-verify.stories.tsx",
    "content": "import { AuthProvider } from '@/components/auth/AuthProvider';\nimport { ChatControllerProvider, useChatController } from '@/components/chat/chat-hooks';\nimport { ChatMessageController, LegacyChatMessageController } from '@/components/chat/chat-message-controller';\nimport { getVerify, VerifyStatus } from '@/experimental/chat-verify-service/api.mock';\nimport type { Meta, StoryObj } from '@storybook/react';\nimport { mutate } from 'swr';\nimport { MessageVerify } from './message-verify';\n\nconst exampleSql = `INSERT INTO orders (user_id, order_date) VALUES (3, '2023-05-03');`;\n\nconst meta = {\n  title: 'Experimental/MessageVerify',\n  component: undefined,\n  parameters: {\n    layout: 'centered',\n  },\n  tags: [],\n  argTypes: {},\n  args: {},\n  beforeEach: async () => {\n    await mutate(() => true, undefined, { revalidate: true });\n  },\n  render (_, { id }) {\n    const controller = useChatController(undefined, undefined, undefined);\n\n    return (\n      <AuthProvider key={id} isLoading={false} isValidating={false} me={{ email: 'foo@bar.com', is_active: true, is_superuser: true, is_verified: true, id: '000' }} reload={() => {}}>\n        <ChatControllerProvider controller={controller}>\n          <div style={{ width: 600 }}>\n            <MessageVerify\n              assistant={new LegacyChatMessageController({ finished_at: new Date(), id: 1, role: 'assistant', content: 'Answer', post_verification_result_url: 'http://foo/bar' } as any, undefined)}\n            />\n          </div>\n        </ChatControllerProvider>\n      </AuthProvider>\n    );\n  },\n} satisfies Meta<any>;\n\nexport default meta;\ntype Story = StoryObj<typeof meta>;\n\nexport const Creating: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(new Promise(() => {}));\n  },\n};\n\nexport const Created: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      status: VerifyStatus.CREATED,\n      message: 'This is a created 
message returned from server',\n      runs_report: null,\n    }));\n  },\n};\n\nexport const Extracting: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      status: VerifyStatus.EXTRACTING,\n      message: 'This is a extracting message returned from server',\n      runs_report: null,\n    }));\n  },\n};\n\nexport const Validating: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      status: VerifyStatus.VALIDATING,\n      message: 'This is a validating message returned from server',\n      runs_report: null,\n    }));\n  },\n};\n\nexport const Verified: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      status: VerifyStatus.SUCCESS,\n      message: 'This is a success message returned from server',\n      runs_report: '#### Create the \\'person\\' table with columns \\'id\\', \\'name\\', and \\'age\\'.\\n\\n:::success\\nThe SQL query executed successfully and created the \\'person\\' table with the specified columns \\'id\\', \\'name\\', and \\'age\\'.\\n:::\\n\\n```sql\\nCREATE TABLE person (id INT PRIMARY KEY, name VARCHAR(255), age INT);\\n```\\n\\n```\\n[[\\'Query OK, 0 row affected (0.295 sec)\\']]\\n```\\n\\n### Insert sample data into the \\'person\\' table.\\n\\n:::success\\nThe SQL query executed successfully and inserted the sample data into the \\'person\\' table as expected.\\n:::\\n\\n```sql\\nINSERT INTO person (id, name, age) VALUES (1, \\'Alice\\', 30), (2, \\'Bob\\', 25), (3, \\'Charlie\\', 35), (4, \\'David\\', 28), (5, \\'Eve\\', 22), (6, \\'Frank\\', 40);\\n```\\n\\n```\\n[[\\'Query OK, 6 row affected (0.164 sec)\\']]\\n```\\n\\n## Retrieve all rows from the \\'person\\' table where the \\'id\\' column is less than 5.\\n\\n:::success\\nThe SQL query executed successfully and retrieved all rows from the \\'person\\' table where the \\'id\\' column is less than 5, which matches the expected result.\\n:::\\n\\n```sql\\nSELECT * FROM person 
WHERE id < 5;\\n```\\n\\n```\\n[[1, \\'Alice\\', 30], [2, \\'Bob\\', 25], [3, \\'Charlie\\', 35], [4, \\'David\\', 28]]\\n```\\n\\n## Clean up the table.\\n\\n:::success\\nThe SQL query executed successfully and dropped the \\'person\\' table as expected, which matches the expected result.\\n:::\\n\\n```sql\\nDROP TABLE person;\\n```\\n\\n```\\n[[\\'Query OK, 0 row affected (0.473 sec)\\']]\\n```\\n\\n',\n    }));\n  },\n};\n\nexport const Failed: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      status: VerifyStatus.FAILED,\n      message: 'This is a failed message returned from server',\n      runs_report: '## Create the \\'person\\' table with columns \\'id\\', \\'name\\', and \\'age\\'.\\n\\n:::failed\\nThe SQL query executed successfully and created the \\'person\\' table with the specified columns \\'id\\', \\'name\\', and \\'age\\'.\\n:::\\n\\n```sql\\nCREATE TABLE person (id INT PRIMARY KEY, name VARCHAR(255), age INT);\\n```\\n\\n```\\n[[\\'Query OK, 0 row affected (0.295 sec)\\']]\\n```\\n\\n## Insert sample data into the \\'person\\' table.\\n\\n:::success\\nThe SQL query executed successfully and inserted the sample data into the \\'person\\' table as expected.\\n:::\\n\\n```sql\\nINSERT INTO person (id, name, age) VALUES (1, \\'Alice\\', 30), (2, \\'Bob\\', 25), (3, \\'Charlie\\', 35), (4, \\'David\\', 28), (5, \\'Eve\\', 22), (6, \\'Frank\\', 40);\\n```\\n\\n```\\n[[\\'Query OK, 6 row affected (0.164 sec)\\']]\\n```\\n\\n## Retrieve all rows from the \\'person\\' table where the \\'id\\' column is less than 5.\\n\\n:::success\\nThe SQL query executed successfully and retrieved all rows from the \\'person\\' table where the \\'id\\' column is less than 5, which matches the expected result.\\n:::\\n\\n```sql\\nSELECT * FROM person WHERE id < 5;\\n```\\n\\n```\\n[[1, \\'Alice\\', 30], [2, \\'Bob\\', 25], [3, \\'Charlie\\', 35], [4, \\'David\\', 28]]\\n```\\n\\n## Clean up the table.\\n\\n:::success\\nThe SQL query 
executed successfully and dropped the \\'person\\' table as expected, which matches the expected result.\\n:::\\n\\n```sql\\nDROP TABLE person;\\n```\\n\\n```\\n[[\\'Query OK, 0 row affected (0.473 sec)\\']]\\n```\\n\\n',\n    }));\n  },\n};\n\nexport const Skipped: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.resolve({\n      'status': VerifyStatus.SKIPPED,\n      'message': 'No SQL examples found to validate.',\n      'runs_report': null,\n    }));\n  },\n};\n\nexport const ApiError: Story = {\n  beforeEach: () => {\n    getVerify.mockReturnValue(Promise.reject(new Error('This is error from server')));\n  },\n};"
  },
  {
    "path": "frontend/app/src/experimental/chat-verify-service/message-verify.tsx",
    "content": "import { getVerify, isFinalVerifyState, isVisibleVerifyState, type MessageVerifyResponse, VerifyStatus } from '#experimental/chat-verify-service/api';\nimport { useChatMessageField, useCurrentChatController } from '@/components/chat/chat-hooks';\nimport type { ChatMessageController } from '@/components/chat/chat-message-controller';\nimport { Button } from '@/components/ui/button';\nimport { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';\nimport { Skeleton } from '@/components/ui/skeleton';\nimport { MessageVerifyResultMarkdown } from '@/experimental/chat-verify-service/message-verify-result-markdown';\nimport { getErrorMessage } from '@/lib/errors';\nimport { isServerError } from '@/lib/request';\nimport { cn } from '@/lib/utils';\nimport { AnimatePresence, motion } from 'framer-motion';\nimport Highlight from 'highlight.js/lib/core';\nimport sql from 'highlight.js/lib/languages/sql';\nimport { CheckCircle2Icon, ChevronDownIcon, CircleMinus, Loader2Icon, RefreshCwIcon, TriangleAlertIcon } from 'lucide-react';\nimport { type ReactElement, type ReactNode, useEffect, useState } from 'react';\nimport useSWR from 'swr';\nimport '@/components/code-theme.scss';\n\nHighlight.registerLanguage('sql', sql);\n\nexport function MessageVerify ({ assistant }: { assistant: ChatMessageController | undefined }) {\n  const [open, setOpen] = useState(false);\n  const [notFound, setNotFound] = useState(false);\n\n  const controller = useCurrentChatController();\n  const messageVerifyUrl = useChatMessageField(assistant, 'post_verification_result_url');\n\n  const { data: result, isLoading: isLoadingResult, error: pollError } = useSWR(\n    messageVerifyUrl && `experimental.chat-message.post-verification.${messageVerifyUrl}`, () => getVerify(messageVerifyUrl!),\n    {\n      revalidateOnMount: true,\n      revalidateOnFocus: false,\n      errorRetryCount: 0,\n      refreshInterval: data => {\n        if (!data) {\n          
return 0;\n        }\n        return !isFinalVerifyState(data.status) ? 1000 : 0;\n      },\n      onError: err => {\n        if (isServerError(err, 404)) {\n          setNotFound(true);\n        }\n\n        console.log(err);\n      },\n    },\n  );\n\n  const canOpen = result ? isVisibleVerifyState(result.status) : false;\n  const creating = (!result && isLoadingResult);\n  const error: unknown = pollError;\n\n  useEffect(() => {\n    console.debug(`[message-verify]`, result);\n  }, [result]);\n\n  if (!messageVerifyUrl || notFound) { // Remove isSuperuser check\n    return null;\n  }\n\n  return (\n    <Collapsible\n      open={canOpen ? open : false}\n      onOpenChange={setOpen}\n      className=\"p-2 border rounded-lg\"\n      disabled={!canOpen}\n    >\n      <CollapsibleTrigger asChild>\n        <Button className=\"group gap-2 w-full break-words max-w-full text-wrap text-left h-max\" variant=\"ghost\">\n          <MessageVerifyHeader result={result} creating={creating} error={error} />\n        </Button>\n      </CollapsibleTrigger>\n      <CollapsibleContent forceMount>\n        <AnimatePresence>\n          {open && result && <motion.div\n            transition={{\n              type: 'spring',\n              duration: 0.25,\n              bounce: false,\n            }}\n            initial={{ height: 0, opacity: 0, overflowY: 'hidden' }}\n            animate={{ height: 'auto', opacity: 1 }}\n            exit={{ height: 0, opacity: 0, overflowY: 'hidden' }}\n            style={{ width: 'var(--radix-collapsible-content-width)' }}\n            layout=\"size\"\n          >\n            <MessageVerifyResultMarkdown content={result.runs_report ?? ''} />\n          </motion.div>}\n        </AnimatePresence>\n      </CollapsibleContent>\n      {!!error && <div className=\"px-4 text-destructive text-xs\">{getErrorMessage(error) ?? 
defaultMessages.error}</div>}\n      <div className=\"my-2 px-4 flex items-center flex-wrap justify-between\">\n        <div className=\"text-xs text-muted-foreground\">\n          Powered by <a className=\"underline font-bold\" href=\"https://www.pingcap.com/tidb-cloud-serverless/?utm_source=tidb.ai&utm_medium=community\" target=\"_blank\">TiDB Serverless</a>\n        </div>\n        {result?.status === VerifyStatus.FAILED && controller.inputEnabled && (\n          <Button\n            size=\"sm\"\n            className=\"gap-1 text-xs px-2 py-1 h-max\"\n            variant=\"ghost\"\n            onClick={() => {\n              controller.input = result.runs_report ?? '';\n              controller.focusInput();\n            }}\n          >\n            <RefreshCwIcon size=\"1em\" />\n            Regenerate with validation messages\n          </Button>\n        )}\n      </div>\n    </Collapsible>\n  );\n}\n\nconst defaultMessages = {\n  'creating': 'Prepare to validate message...',\n  'error': 'Unknown error',\n  [VerifyStatus.CREATED]: 'Prepare to validate message...',\n  [VerifyStatus.EXTRACTING]: 'Extracting SQL...',\n  [VerifyStatus.VALIDATING]: 'Validation SQL...',\n  [VerifyStatus.SUCCESS]: 'Message validation succeed.',\n  [VerifyStatus.FAILED]: 'Message validation failed.',\n  [VerifyStatus.SKIPPED]: 'Message validated skipped.',\n};\n\nconst skippedIcon = <CircleMinus className=\"size-4\" />;\nconst loadingIcon = <Loader2Icon className=\"size-4 animate-spin repeat-infinite\" />;\nconst succeedIcon = <CheckCircle2Icon className=\"size-4 text-success\" />;\nconst failedIcon = <TriangleAlertIcon className=\"size-4 text-warning\" />;\nconst errorIcon = <TriangleAlertIcon className=\"size-4 text-destructive\" />;\n\nfunction MessageVerifyHeader ({ creating, error, result }: { creating?: boolean, error: unknown, result: MessageVerifyResponse | undefined }) {\n  let icon: ReactElement<any> | undefined;\n  let message: ReactNode | undefined;\n  const 
indicatorVisible = result ? isVisibleVerifyState(result.status) : false;\n\n  if (creating) {\n    icon = <Skeleton className=\"block w-4 h-4 my-0.5 rounded-full bg-muted-foreground/30\" />;\n    message = <Skeleton className=\"inline-block w-48 h-4 my-0.5 rounded bg-muted-foreground/30\" />;\n  } else if (error) {\n    icon = errorIcon;\n    message = 'Failed to get post validation result.';\n  } else {\n    switch (result?.status) {\n      case VerifyStatus.CREATED:\n      case VerifyStatus.EXTRACTING:\n      case VerifyStatus.VALIDATING:\n        icon = loadingIcon;\n        break;\n      case VerifyStatus.SUCCESS:\n        icon = succeedIcon;\n        break;\n      case VerifyStatus.FAILED:\n        icon = failedIcon;\n        break;\n      case VerifyStatus.SKIPPED:\n        icon = skippedIcon;\n        break;\n      default:\n        icon = undefined;\n        break;\n    }\n    message = result?.message ?? (result ? defaultMessages[result.status] : undefined) ?? 'Unknown validation state.';\n  }\n\n  return (\n    <>\n      {icon}\n      {message}\n      <ChevronDownIcon className={cn('size-4 ml-auto transition-transform group-data-[state=open]:rotate-180', indicatorVisible ? 'visible' : 'invisible')} />\n    </>\n  );\n}\n"
  },
  {
    "path": "frontend/app/src/experimental/experimental-features-provider.tsx",
    "content": "'use client';\n\nimport { createContext, type ReactNode, useContext } from 'react';\n\nexport interface ExperimentalFeatures {\n  enable_message_post_verification: boolean;\n}\n\nconst ExperimentalFeaturesContext = createContext<Partial<ExperimentalFeatures>>({});\n\nexport function ExperimentalFeaturesProvider ({ features, children }: { features: Partial<ExperimentalFeatures>, children: ReactNode }) {\n  return (\n    <ExperimentalFeaturesContext.Provider value={features}>\n      {children}\n    </ExperimentalFeaturesContext.Provider>\n  );\n}\n\nexport function useExperimentalFeatures () {\n  return useContext(ExperimentalFeaturesContext);\n}\n"
  },
  {
    "path": "frontend/app/src/experimental/experimental-features.ts",
    "content": "import type { ExperimentalFeatures } from '@/experimental/experimental-features-provider';\n\nexport function experimentalFeatures (): Partial<ExperimentalFeatures> {\n  return {\n    enable_message_post_verification: true,\n  };\n}\n"
  },
  {
    "path": "frontend/app/src/hooks/use-mobile.tsx",
    "content": "import * as React from \"react\"\n\nconst MOBILE_BREAKPOINT = 768\n\nexport function useIsMobile() {\n  const [isMobile, setIsMobile] = React.useState<boolean | undefined>(undefined)\n\n  React.useEffect(() => {\n    const mql = window.matchMedia(`(max-width: ${MOBILE_BREAKPOINT - 1}px)`)\n    const onChange = () => {\n      setIsMobile(window.innerWidth < MOBILE_BREAKPOINT)\n    }\n    mql.addEventListener(\"change\", onChange)\n    setIsMobile(window.innerWidth < MOBILE_BREAKPOINT)\n    return () => mql.removeEventListener(\"change\", onChange)\n  }, [])\n\n  return !!isMobile\n}\n"
  },
  {
    "path": "frontend/app/src/hooks/use-model-provider.ts",
    "content": "import type { ProviderOption } from '@/api/providers';\nimport { subscribeField } from '@/lib/tanstack-form';\nimport type { KeyOfType } from '@/lib/typing-utils';\nimport type { FormApi, ReactFormExtendedApi } from '@tanstack/react-form';\nimport { useEffect, useState } from 'react';\n\nexport function useModelProvider<O extends ProviderOption, F extends { provider: string, model?: string, credentials?: string | object, config?: any }> (\n  propForm: ReactFormExtendedApi<F>,\n  options: O[] | undefined,\n  defaultModelField: KeyOfType<O, string>,\n) {\n  const form = propForm as never as FormApi<{ provider: string, model?: string, credentials?: string, config: string }>;\n\n  const [provider, setProvider] = useState<O | undefined>(() => options?.find(option => option.provider === form.getFieldValue('provider' as never)));\n\n  useEffect(() => {\n    let lastProvider = form.getFieldValue('provider' as never);\n    const o = options?.find(option => option.provider === lastProvider);\n    setProvider(o);\n\n    return subscribeField(form, 'provider', name => {\n      const provider = options?.find(option => option.provider === name);\n\n      if (name) {\n        setProvider(provider);\n      } else {\n        setProvider(undefined);\n      }\n\n      if (provider) {\n        form.store.batch(() => {\n          form.setFieldValue('model', provider[defaultModelField] as string);\n          form.setFieldValue('credentials', '');\n          form.setFieldValue('config', JSON.stringify(provider.default_config, undefined, 2));\n        });\n      } else if (name) {\n        // Provider not found, clear all provider spec data.\n        form.store.batch(() => {\n          if (name) {\n            form.fieldInfo.provider?.instance?.setErrorMap({\n              onChange: `Invalid provider '${name}'`,\n            });\n          }\n          form.setFieldValue('model', '');\n          form.setFieldValue('credentials', '');\n          form.setFieldValue('config', 
'{}');\n        });\n      }\n    });\n  }, [form, options, defaultModelField]);\n\n  return provider;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/auth.ts",
    "content": "import { getMe, type MeInfo } from '@/api/users';\nimport { redirect } from 'next/navigation';\nimport { cache } from 'react';\n\nexport const auth = cache(() => getMe().catch(() => undefined));\n\nexport async function requireAuth (): Promise<MeInfo> {\n  const me = await auth();\n  if (!me) {\n    redirect('/auth/login');\n  }\n\n  return me;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/buffered-readable-stream.test.ts",
    "content": "import { bufferedReadableStreamTransformer } from '@/lib/buffered-readable-stream';\n\ntest('simple', async () => {\n  expect(await mock(['Hello, world!\\n'])).toEqual(['Hello, world!']);\n  expect(await mock(['Hello,', ' world!\\n'])).toEqual(['Hello, world!']);\n  expect(await mock(['Hello,', ' world!\\n', '\\n'])).toEqual(['Hello, world!', '']);\n  expect(await mock(['\\n', 'Hello,', ' world!\\n', '\\n'])).toEqual(['', 'Hello, world!', '']);\n  expect(await mock(['Hello, world!\\nHello, ', 'world!\\n'])).toEqual(['Hello, world!', 'Hello, world!']);\n  expect(await mock(['not finished'])).toEqual([]);\n});\n\ntest('stream error', async () => {\n  const stream = new ReadableStream({\n    async start (controller) {\n      controller.error('reason');\n    },\n  });\n\n  const reader = stream.pipeThrough(bufferedReadableStreamTransformer()).getReader();\n\n  try {\n    await reader.read();\n    throw new Error('should failing');\n  } catch (e) {\n    expect(e).toEqual('reason');\n  }\n});\n\nasync function mock (array: string[]) {\n  const stream = new ReadableStream({\n    async start (controller) {\n      const encoder = new TextEncoder();\n      for (let string of array) {\n        await new Promise((resolve) => setTimeout(resolve, 100));\n        controller.enqueue(encoder.encode(string));\n      }\n      controller.close();\n    },\n  });\n\n  const reader = stream.pipeThrough(bufferedReadableStreamTransformer()).getReader();\n\n  const transformedChunks: string[] = [];\n\n  while (true) {\n    const chunk = await reader.read();\n\n    if (chunk.done) {\n      break;\n    }\n\n    transformedChunks.push(chunk.value);\n  }\n\n  return transformedChunks;\n}"
  },
  {
    "path": "frontend/app/src/lib/buffered-readable-stream.ts",
    "content": "export function bufferedReadableStreamTransformer (): TransformStream<any, string> {\n  const decoder = new TextDecoder();\n  const buffer: string[] = [];\n\n  const appendTextChunk = (chunk: string) => {\n    if (buffer.length > 0 && !buffer[buffer.length - 1].endsWith('\\n')) {\n      buffer[buffer.length - 1] += chunk;\n    } else {\n      buffer.push(chunk);\n    }\n  };\n\n  const extractLines = () => {\n    const lines: string[] = [];\n    while (true) {\n      const data = buffer.shift();\n      if (data == null) break;\n\n      if (buffer.length > 0) {\n        // This branch might be never executed.\n        lines.push(...data.split('\\n'));\n      } else {\n        let start = 0, end: number = 0;\n\n        while (start < data.length) {\n          end = data.indexOf('\\n', start);\n          if (end === -1) {\n            break;\n          }\n\n          lines.push(data.slice(start, end));\n          start = end + 1;\n        }\n\n        if (start < data.length) {\n          buffer.push(data.slice(start));\n        }\n\n        break;\n      }\n    }\n\n    return lines;\n  };\n\n  return new TransformStream<any, string>({\n    transform (chunk, controller) {\n      const textChunk = decoder.decode(chunk, { stream: true });\n      appendTextChunk(textChunk);\n      extractLines().forEach(line => controller.enqueue(line));\n    },\n    flush (controller) {\n      const lines = extractLines();\n      lines.forEach(line => controller.enqueue(line));\n      if (buffer.length > 0) {\n        console.error('Stream is not finished, ignoring last chunk', buffer[0]);\n      }\n    },\n  });\n}\n"
  },
  {
    "path": "frontend/app/src/lib/errors.ts",
    "content": "import { ZodError } from 'zod';\n\nexport function getErrorMessage (e: unknown) {\n  if (!e) {\n    return 'Unknown error';\n  }\n  if (typeof e !== 'object') {\n    return String(e);\n  }\n\n  if (e instanceof ZodError) {\n    return `JSON validation failed: ${e.format()._errors.join(', ')}.`;\n  }\n\n  return ((e as any).message) || ((e as any).name) || String(e);\n}\n\nexport function getErrorName (error: unknown) {\n  if (!error) {\n    return 'UNKNOWN';\n  }\n  if (typeof error === 'object') {\n    return error.constructor.name;\n  }\n  return String(error);\n}\n"
  },
  {
    "path": "frontend/app/src/lib/react.ts",
    "content": "import type { ChangeEvent, SyntheticEvent } from 'react';\n\nexport function trigger<T extends typeof HTMLTextAreaElement | typeof HTMLInputElement> (inputElement: InstanceType<T>, Element: T, value: string) {\n  // https://stackoverflow.com/questions/23892547/what-is-the-best-way-to-trigger-change-or-input-event-in-react-js\n  const set = Object.getOwnPropertyDescriptor(Element.prototype, 'value')!.set!;\n  set.call(inputElement, value);\n  const event = new Event('input', { bubbles: true });\n  inputElement.dispatchEvent(event);\n}\n\nexport function isEvent (value: unknown): value is SyntheticEvent {\n  if (!value) {\n    return false;\n  }\n\n  if (typeof value !== 'object') {\n    return false;\n  }\n\n  for (const name of ['stopPropagation', 'preventDefault', 'type']) {\n    if (!(name in value)) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\nexport function isChangeEvent (value: unknown): value is ChangeEvent {\n  return isEvent(value) && value.type === 'change';\n}\n"
  },
  {
    "path": "frontend/app/src/lib/request/authenticationHeaders.mock.ts",
    "content": "export * from './authenticationHeaders';\n"
  },
  {
    "path": "frontend/app/src/lib/request/authenticationHeaders.react-server.ts",
    "content": "import { cookies } from 'next/headers';\n\nexport async function authenticationHeaders (): Promise<Record<string, string>> {\n  const k = await cookies();\n\n  return { Cookie: k.toString() }; // Only work on rsc;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/request/authenticationHeaders.tidbai-widget.ts",
    "content": "export * from './authenticationHeaders';\n"
  },
  {
    "path": "frontend/app/src/lib/request/authenticationHeaders.ts",
    "content": "export async function authenticationHeaders (): Promise<Record<string, string>> {\n  return {};\n}\n"
  },
  {
    "path": "frontend/app/src/lib/request/base-url.mock.ts",
    "content": "export * from './base-url';\n"
  },
  {
    "path": "frontend/app/src/lib/request/base-url.react-server.ts",
    "content": "const BASE_URL = process.env.BASE_URL!;\n\nexport { BASE_URL };\n"
  },
  {
    "path": "frontend/app/src/lib/request/base-url.tidbai-widget.ts",
    "content": "let BASE_URL: string;\n\nconst script = document.currentScript as HTMLScriptElement | null;\nif (!script) {\n  throw new Error(`Widget not supported in this browser (evaluating document.currentScript)`);\n}\n// data-api-base\nif (script.dataset.apiBase) {\n  BASE_URL = script.dataset.apiBase;\n  console.debug('[tidbai.widget]', 'widget base url resolved by \"data-api-base\" attribute', BASE_URL);\n} else if (/^https?:\\/\\//.test(script.src)) {\n  const scriptUrl = new URL(script.src);\n  BASE_URL = scriptUrl.origin;\n  console.debug('[tidbai.widget]', 'widget base url resolved by script origin', BASE_URL);\n} else {\n  console.warn(`Add attribute \"data-api-base\"=\"YOUR_HOST\" to your widget script tag.`);\n  throw new Error(`Cannot initialize widget.`);\n}\n\nexport { BASE_URL }"
  },
  {
    "path": "frontend/app/src/lib/request/base-url.ts",
    "content": "const BASE_URL = process.env.BASE_URL || '';\n\nexport { BASE_URL };\n"
  },
  {
    "path": "frontend/app/src/lib/request/errors.ts",
    "content": "\nexport class ServerError extends Error {\n  constructor (readonly response: Response, message: string) {\n    if (response.headers.get('Content-Type')?.includes('text/html') || message.trimStart().startsWith('<!DOCTYPE') || message.trimStart().startsWith('<html')) {\n      message = `${response.status} ${response.statusText} HTML Error Page`;\n    }\n    super(message);\n  }\n}\n\nexport function isServerError (error: unknown, status?: number | number[]): error is ServerError {\n  if (error instanceof ServerError) {\n    if (status) {\n      if (typeof status === 'number') {\n        return error.response.status === status;\n      } else {\n        return status.includes(error.response.status);\n      }\n    }\n  }\n\n  return false;\n}\n\nexport function normalizeServerErrors (response: Response, error: unknown): ServerError {\n  if (error == null) {\n    return new ServerError(response, 'No error detail');\n  }\n\n  if (typeof error === 'object') {\n    if ('detail' in error && error.detail != null) {\n      if (typeof error.detail === 'string') {\n        return new ServerError(response, error.detail);\n      }\n      if (error.detail instanceof Array && error.detail[0] != null) {\n        return new ServerError(response, error.detail[0].msg ?? String(error.detail[0]));\n      }\n    }\n    if ('message' in error) {\n      return new ServerError(response, String(error.message));\n    }\n  }\n\n  console.error(error);\n\n  return new ServerError(response, String(error));\n}\n"
  },
  {
    "path": "frontend/app/src/lib/request/index.ts",
    "content": "export { authenticationHeaders } from '#lib/request/authenticationHeaders';\nexport { BASE_URL } from '#lib/request/base-url';\nexport * from './response-handlers';\nexport * from './errors';\nexport * from './params';\nexport { type Page, type PageParams, zodPage } from '../zod';\nexport * from './url';\nexport * from './list-all-helper';\n"
  },
  {
    "path": "frontend/app/src/lib/request/list-all-helper.ts",
    "content": "import type { Page, PageParams } from '@/lib/zod';\n\nexport async function listAllHelper<T> (api: (params: PageParams) => Promise<Page<T>>, idField: keyof T) {\n  let page = 1;\n  const chunks: Page<T>[] = [];\n\n  while (true) {\n    const current = await api({ page, size: 100 });\n    chunks.push(current);\n    if (page < current.pages) {\n      page += 1;\n    } else {\n      break;\n    }\n  }\n\n  const idSet = new Set();\n  const result: T[] = [];\n\n  for (const chunk of chunks) {\n    for (const item of chunk.items) {\n      if (!idSet.has(item[idField])) {\n        idSet.add(item[idField]);\n        result.push(item);\n      }\n    }\n  }\n\n  return result;\n}"
  },
  {
    "path": "frontend/app/src/lib/request/params.ts",
    "content": "export function buildUrlParams (object: object) {\n  const usp = new URLSearchParams();\n\n  for (let key of Object.keys(object)) {\n    const value = (object as any)[key];\n\n    if (value == null) {\n      continue;\n    }\n\n    if (value instanceof Array) {\n      for (let item of value) {\n        usp.append(key, stringify(item));\n      }\n    } else {\n      usp.append(key, stringify(value));\n    }\n  }\n\n  return usp;\n}\n\nfunction stringify (item: any) {\n  if (item instanceof Date) {\n    return item.toISOString();\n  } else {\n    return String(item);\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/lib/request/response-handlers.ts",
    "content": "import { normalizeServerErrors } from '@/lib/request/index';\nimport { z, ZodType } from 'zod';\n\nexport async function handleErrors (responseOrPromise: Response | PromiseLike<Response>): Promise<Response> {\n  const response = await responseOrPromise;\n  if (response.ok) {\n    return response;\n  }\n\n  try {\n    const jsonBody = await response.clone().json();\n    return Promise.reject(normalizeServerErrors(response, jsonBody));\n  } catch {\n    try {\n      const textBody = await response.clone().text();\n      return Promise.reject(normalizeServerErrors(response, textBody));\n    } catch {\n      return Promise.reject(normalizeServerErrors(response, `${response.status} ${response.statusText}`));\n    }\n  }\n}\n\nexport function handleResponse<S extends ZodType> (schema: S): ((responseOrPromise: Response | PromiseLike<Response>) => Promise<z.infer<S>>) {\n  return async (responseOrPromise) => {\n    const response = await Promise.resolve(responseOrPromise).then(handleErrors);\n    const body = await response.json();\n\n    try {\n      return schema.parse(body);\n    } catch (e) {\n      console.error(e);\n      console.error(`Cannot parse response json data for ${response.url} ${response.status}, check your frontend and backend versions.`, e);\n      throw e;\n    }\n  };\n}\n\nexport function handleNullableResponse<S extends ZodType> (schema: S): ((responseOrPromise: Response | PromiseLike<Response>) => Promise<z.infer<S> | null>) {\n  return async (responseOrPromise) => {\n    const response = await responseOrPromise;\n\n    if (response.status === 404) {\n      return null;\n    }\n\n    await handleErrors(response);\n    const body = await response.json();\n\n    try {\n      return schema.parse(body);\n    } catch (e) {\n      console.error(e);\n      console.error(`Cannot parse response json data for ${response.url} ${response.status}, check your frontend and backend versions.`, 2);\n      throw e;\n    }\n  };\n}"
  },
  {
    "path": "frontend/app/src/lib/request/url.ts",
    "content": "import { BASE_URL } from '#lib/request/base-url';\nimport { buildUrlParams } from '@/lib/request/params';\n\nexport function requestUrl (pathname: string, searchParams?: object) {\n  let url = BASE_URL + pathname;\n\n  if (searchParams) {\n    const usp = buildUrlParams(searchParams).toString();\n    if (usp) {\n      url += '?' + usp;\n    }\n  }\n\n  return url;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/index.ts",
    "content": "export * from './types' ;\nexport * from './model';\nexport * from './visit';\nexport * from './instructions';\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/instructions/index.ts",
    "content": "import { Assign, Calling, Fallback, Jmp, Reasoning } from './instructions';\nimport { InstructionsRegistry } from './registry';\n\nconst instructions = new InstructionsRegistry();\n\ninstructions.register('assign', new Assign());\ninstructions.register('jmp', new Jmp());\ninstructions.register('calling', new Calling());\ninstructions.register('reasoning', new Reasoning());\ninstructions.registerFallback(new Fallback());\n\nexport { instructions };\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/instructions/instructions.ts",
    "content": "import { model, type Step } from '..';\nimport { type ConnectInfo, InstructionModel } from './registry';\n\nexport class Assign extends InstructionModel<'assign'> {\n  outputs (step: Step<'assign'>): string[] {\n    return Object.keys(step.parameters);\n  }\n}\n\nexport class Calling extends InstructionModel<'calling'> {\n}\n\nexport class Jmp extends InstructionModel<'jmp'> {\n  connect (step: model.StepModel<'jmp'>): ConnectInfo[] {\n    if ('target_seq' in step.parameters) {\n      return [{\n        target: `step:${step.parameters.target_seq}`,\n        handle: 'target_seq',\n        required: true,\n      }];\n    } else {\n      return [\n        {\n          target: `step:${step.parameters.jump_if_true}`,\n          required: true,\n          handle: 'target_seq',\n        },\n        {\n          target: `step:${step.parameters.jump_if_false}`,\n          required: true,\n          handle: 'target_seq',\n        },\n      ];\n    }\n  }\n}\n\nexport class Reasoning extends InstructionModel<'reasoning'> {\n}\n\nexport class Fallback extends InstructionModel<string> {\n}\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/instructions/registry.ts",
    "content": "import { StackVM } from '../..';\nimport { extractTemplates } from '@/lib/strings';\n\nexport class InstructionsRegistry {\n  private instructions: Map<string, InstructionModel<StackVM.InstructionType>> = new Map();\n  private fallback = new InstructionModel<string>();\n\n  registerFallback (model: InstructionModel<string>) {\n    this.fallback = model;\n  }\n\n  register<I extends StackVM.InstructionType> (instruction: I, model: InstructionModel<I>) {\n    this.instructions.set(instruction, model);\n  }\n\n  visits (step: StackVM.Step) {\n    return (this.instructions.get(step.type) ?? this.fallback).visit(step);\n  }\n\n  getInputVars (step: StackVM.Step) {\n    return (this.instructions.get(step.type) ?? this.fallback).inputs(step);\n  }\n\n  getOutputVars (step: StackVM.Step) {\n    return (this.instructions.get(step.type) ?? this.fallback).outputs(step);\n  }\n\n  getConnections (step: StackVM.model.StepModel) {\n    return (this.instructions.get(step.type) ?? this.fallback).connect(step);\n  }\n}\n\ntype VisitResult = boolean | StackVM.SeqNo[];\n\nexport type ConnectInfo = { required?: boolean, target: string, handle: string }\n\nexport class InstructionModel<I extends StackVM.InstructionType> {\n  visit (_step: StackVM.Step<I>): VisitResult {\n    // By default, will visit next seq\n    return true;\n  }\n\n  inputs (step: StackVM.Step<I>): string[] {\n    function recursiveExtractTemplates (value: unknown, footprints: Set<unknown>): string[] {\n      if (typeof value === 'string') {\n        return extractTemplates(value);\n      }\n      if (value != null && typeof value === 'object') {\n        if (footprints.has(value)) {\n          return [];\n        }\n        footprints.add(value);\n        if (value instanceof Array) {\n          return value.flatMap(item => recursiveExtractTemplates(item, footprints));\n        } else {\n          return recursiveExtractTemplates(Object.values(value), footprints);\n        }\n      }\n      return 
[];\n    }\n\n    return Object.values(step.parameters).flatMap(value => recursiveExtractTemplates(value, new Set()));\n  }\n\n  outputs (step: StackVM.Step<I>): string[] {\n    const parameters: string[] = [];\n    // For special instructions\n    if ('output_vars' in step.parameters) {\n      if (typeof step.parameters.output_vars === 'string') {\n        parameters.push(step.parameters.output_vars);\n      } else if (step.parameters.output_vars instanceof Array) {\n        step.parameters.output_vars.forEach(item => {\n          if (typeof item === 'string') {\n            parameters.push(item);\n          }\n        });\n      }\n    }\n\n    return parameters;\n  }\n\n  // Gen graph\n\n  connect (step: StackVM.model.StepModel<I>): ConnectInfo[] {\n    return [{\n      required: false,\n      target: `step:${step.seq_no + 1}`,\n      handle: 'out',\n    }];\n  }\n}\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/model.ts",
    "content": "import { instructions } from './instructions';\nimport { type InstructionType, type State, type Step } from './types';\n\nexport namespace model {\n  export interface VarBindingInfo {\n    id: string;\n    parameter?: string;\n  }\n\n  export interface StepModel<I extends InstructionType = InstructionType> extends Step<I> {\n    id: string;\n    output_vars: VarBindingInfo[];\n    input_vars: VarBindingInfo[];\n  }\n\n  export interface VarModel {\n    id: string;\n    name: string;\n    value?: unknown;\n    arc?: number;\n    execution_state?: 'write';\n\n    [key: string]: unknown;\n  }\n\n  export interface ParsedState extends State {\n    plan: {\n      steps: StepModel[];\n      vars: VarModel[];\n    };\n  }\n\n  export function parseState (state: State): ParsedState {\n    const steps = new Map<string, StepModel>;\n    const vars = new Set<string>;\n\n    for (const step of state.current_plan) {\n      const id = `step:${step.seq_no}`;\n      const output_vars: VarBindingInfo[] = [];\n      const input_vars: VarBindingInfo[] = [];\n\n      for (let name of instructions.getInputVars(step)) {\n        addVar(input_vars, {\n          id: `var:${name}`,\n          parameter: name,\n        });\n        vars.add(name);\n      }\n\n      for (let name of instructions.getOutputVars(step)) {\n        addVar(output_vars, {\n          id: `var:${name}`,\n          parameter: name,\n        });\n        vars.add(name);\n      }\n\n      steps.set(id, {\n        ...step,\n        id,\n        output_vars,\n        input_vars,\n      });\n    }\n\n    return {\n      ...state,\n      plan: {\n        steps: Array.from(steps.values()),\n        vars: Array.from(vars).map(name => ({ id: `var:${name}`, name })),\n      },\n    };\n  }\n\n  export interface StatePatch {\n    steps: Patch<StepModel>;\n    vars: Patch<VarModel>;\n  }\n\n  export function diffStates (from: ParsedState, to: ParsedState): StatePatch {\n    return {\n      steps: 
diff(from.plan.steps, to.plan.steps),\n      vars: diff(from.plan.vars, to.plan.vars),\n    };\n  }\n\n  function addVar (bindings: VarBindingInfo[], newOne: VarBindingInfo) {\n    if (!bindings.find(binding => binding.id === newOne.id)) {\n      bindings.push(newOne);\n    }\n  }\n}\n\ntype Patch<T extends { id: string }> = {\n  del: string[]\n  add: T[]\n  update: Record<string, T>\n}\n\nfunction diff<T extends { id: string }> (from: T[], to: T[]): Patch<T> {\n  const oldItems = new Set(from.map(item => item.id));\n  const newItems = new Map(to.map(item => [item.id, item]));\n\n  const patch: Patch<T> = {\n    del: [],\n    add: [],\n    update: {},\n  };\n\n  oldItems.forEach(id => {\n    const step = newItems.get(id);\n    if (step) {\n      patch.update[id] = step;\n    } else {\n      patch.del.push(id);\n    }\n  });\n\n  newItems.forEach((step, id) => {\n    if (!oldItems.has(id)) {\n      patch.add.push(step);\n    }\n  });\n\n  return patch;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/types.ts",
    "content": "export type SeqNo = number\n\nexport interface KnownInstructions {\n  assign: Record<string, unknown>;\n\n  jmp: {\n    condition_prompt: string\n    context: string | null\n    jump_if_true: SeqNo\n    jump_if_false: SeqNo\n  } | {\n    target_seq: SeqNo\n  };\n\n  calling: {\n    tool_name: string\n    tool_params: Record<string, unknown>\n    output_vars: string | string[]\n  };\n\n  reasoning: {\n    chain_of_thoughts: string\n    dependency_analysis: string\n  };\n}\n\nexport interface Instructions extends KnownInstructions {\n  [key: string]: Record<string, unknown>;\n}\n\nexport type InstructionType = string & keyof Instructions;\n\nexport interface Step<Instruction extends InstructionType = InstructionType> {\n  seq_no: SeqNo;\n  type: Instruction;\n  parameters: Instructions[Instruction];\n\n  [key: string]: unknown;\n}\n\nexport interface Plan {\n  steps: Step[];\n\n  [key: string]: unknown;\n}\n\nexport interface State {\n  current_plan: Step[];\n  errors: string[];\n  goal: string;\n  goal_completed: boolean;\n  msgs: Record<string, string>[];\n  program_counter: number;\n  variables: Record<string, unknown>;\n  variables_refs: Record<string, number>;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/stackvm/core/visit.ts",
    "content": "import { model } from './model';\n\nexport namespace algo {\n\n  export function visit (state: model.ParsedState, stepId: string, iterFunc: (step: model.StepModel) => void | false) {\n    const steps = new Map(state.plan.steps.map(step => [step.id, step]));\n    // const vars = new Map(plan.vars.map(v => [v.id, v]));\n\n    if (!steps.has(stepId)) {\n      throw new Error(`Invalid step ${stepId}`);\n    }\n\n    const footprint = new Set<string>();\n    const queue = [steps.get(stepId)!];\n\n    while (queue.length > 0) {\n      const step = queue.shift()!;\n      footprint.add(step.id);\n\n      if (iterFunc(step) === false) {\n        break;\n      }\n\n      switch (step.type) {\n        case 'jmp':\n          if ('target_seq' in step.parameters) {\n            const next = steps.get(`step:${step.parameters.target_seq}`)!;\n            if (!footprint.has(next.id)) {\n              queue.push(next);\n            }\n          } else {\n            const nextTrue = steps.get(`step:${step.parameters.jump_if_true}`)!;\n            const nextFalse = steps.get(`step:${step.parameters.jump_if_false}`)!;\n            if (!footprint.has(nextTrue.id)) {\n              queue.push(nextTrue);\n            }\n            if (!footprint.has(nextFalse.id)) {\n              queue.push(nextFalse);\n            }\n          }\n          break;\n        default:\n          if (steps.has(`step:${step.seq_no + 1}`)) {\n            const next = steps.get(`step:${step.seq_no + 1}`)!;\n            if (!footprint.has(next.id)) {\n              queue.push(next);\n            }\n          }\n          break;\n      }\n    }\n  }\n}"
  },
  {
    "path": "frontend/app/src/lib/stackvm/index.ts",
    "content": "export * as StackVM from './core';\n"
  },
  {
    "path": "frontend/app/src/lib/strings.ts",
    "content": "const cache = new Map<string, string[]>;\n\nexport function extractTemplates (text: string) {\n  const cached = cache.get(text);\n  if (cached) {\n    return cached;\n  }\n\n  const STATE_NO = 0;\n  const STATE_DOLLAR = 1;\n  const STATE_BRACE = 2;\n\n  const names = new Set<string>();\n\n  let state: 0 | 1 | 2 = STATE_NO;\n\n  let s = -1;\n  let i = 0;\n\n  while (i < text.length) {\n    const c = text[i];\n\n    switch (c) {\n      case '\\\\':\n        i += 1;\n        break;\n      case '$':\n        if (state !== STATE_BRACE) {\n          state = STATE_DOLLAR;\n        }\n        break;\n      case '{':\n        if (state === STATE_DOLLAR) {\n          state = STATE_BRACE;\n          s = i;\n        }\n        break;\n      case '}':\n        if (state === STATE_BRACE) {\n          names.add(text.slice(s + 1, i));\n          state = STATE_NO;\n        }\n        break;\n      default:\n        break;\n    }\n\n    i += 1;\n  }\n\n  const result = Array.from(names);\n  cache.set(text, result);\n  return result;\n}\n"
  },
  {
    "path": "frontend/app/src/lib/tanstack-form.ts",
    "content": "import type { DeepKeys, DeepValue, FormApi } from '@tanstack/react-form';\n\nexport function subscribeField<TData, TName extends DeepKeys<TData>> (\n  form: FormApi<TData>,\n  name: TName,\n  cb: (value: DeepValue<TData, TName>, oldValue: DeepValue<TData, TName>) => void,\n) {\n  let oldValue = form.getFieldValue(name);\n\n  return form.store.subscribe(() => {\n    const newValue = form.getFieldValue(name);\n\n    if (newValue !== oldValue) {\n      const ol = oldValue;\n      oldValue = newValue;\n      cb(newValue, ol);\n    }\n  });\n}\n"
  },
  {
    "path": "frontend/app/src/lib/typing-utils.ts",
    "content": "export type KeyOfType<T, Value> = keyof { [P in keyof T as T[P] extends Value ? P : never]: any }\n"
  },
  {
    "path": "frontend/app/src/lib/ui-error.tsx",
    "content": "import { toast } from 'sonner';\n\nexport function toastSuccess (message: string) {\n  toast.success(message);\n}\n\nexport function toastError (title: string, error: unknown) {\n  toast.error(title, { description: getErrorMessage(error) });\n}\n\nfunction getErrorMessage (error: unknown) {\n  if (!error) {\n    return 'Unknown error message';\n  }\n  if (typeof error === 'object') {\n    if ('message' in error) {\n      return String(error.message);\n    }\n  }\n  return String(error);\n}\n"
  },
  {
    "path": "frontend/app/src/lib/utils.ts",
    "content": "import { type ClassValue, clsx } from \"clsx\"\nimport { twMerge } from \"tailwind-merge\"\n\nexport function cn(...inputs: ClassValue[]) {\n  return twMerge(clsx(inputs))\n}\n"
  },
  {
    "path": "frontend/app/src/lib/zod.test.ts",
    "content": "import { zodFile, zodJsonDate, zodJsonText } from '@/lib/zod';\n\ntest('zodDate', async () => {\n  expect(zodJsonDate().safeParse('2024-08-09T01:28:40.370Z').success).toBe(true);\n  expect(zodJsonDate().safeParse('2024-08-09T01:28:40.370').success).toBe(true);\n});\n\ntest('zodJsonText', async () => {\n  expect(zodJsonText().safeParse('{}').success).toBe(true);\n  expect(zodJsonText().safeParse('null').success).toBe(true);\n  expect(zodJsonText().safeParse('undefined').success).toBe(false);\n  expect(zodJsonText().safeParse('{').success).toBe(false);\n});\n\ntest('zodFile', async () => {\n  expect(zodFile().safeParse(new File([], '')).success).toBe(true);\n  expect(zodFile().safeParse({}).success).toBe(false);\n});\n"
  },
  {
    "path": "frontend/app/src/lib/zod.ts",
    "content": "import { parse, parseJSON, startOfToday } from 'date-fns';\nimport { z, ZodType } from 'zod';\n\nconst BASE_DATE = startOfToday();\n\nexport function zodDateOnlyString () {\n  return z.string().regex(/^\\d\\d\\d\\d-\\d\\d-\\d\\d$/).transform(str => {\n    return parse(str, 'yyyy-MM-dd', BASE_DATE);\n  });\n}\n\nexport function zodJsonDate (message?: string) {\n  return z.string().pipe(d);\n}\n\nexport function zodJsonText () {\n  return z.string()\n    .refine(val => {\n      try {\n        JSON.parse(val);\n        return true;\n      } catch {\n        return false;\n      }\n    }, { message: 'Invalid JSON' })\n    .transform(value => JSON.parse(value));\n}\n\nexport function zodFile () {\n  return z.custom<File>(value => value instanceof File);\n}\n\nexport function zodJson () {\n  return z.union([\n    z.string(),\n    z.number(),\n    z.boolean(),\n    z.any().array(),\n    z.object({}).passthrough(),\n  ]);\n}\n\nconst d = z\n  .custom<string>(data => {\n    if (typeof data !== 'string') {\n      throw new Error('Requires a date string');\n    }\n\n    const date = parseJSON(data);\n    return !isNaN(date.getTime());\n  }, { message: 'Invalid date' })\n  .transform(out => {\n    return parseJSON(out);\n  });\n\nexport interface PageParams {\n  page?: number; // 1 based\n  size?: number;\n}\n\nexport interface Page<T> {\n  items: T[];\n  total: number;\n  page: number;\n  size: number;\n  pages: number;\n}\n\nexport function zodPage<Z extends ZodType> (itemSchema: Z) {\n  return z.object({\n    items: itemSchema.array(),\n    total: z.number(),\n    page: z.number(),\n    size: z.number(),\n    pages: z.number(),\n  });\n}\n\nexport function noPage<T> (data: T[]): Page<T> {\n  return {\n    items: data,\n    total: data.length,\n    page: 1,\n    size: data.length,\n    pages: 1,\n  };\n}"
  },
  {
    "path": "frontend/app/tailwind.config.ts",
    "content": "import type { Config } from 'tailwindcss';\n\nconst config: Config = {\n  darkMode: ['class'],\n  content: [\n    './pages/**/*.{js,jsx,ts,tsx}',\n    './components/**/*.{js,jsx,ts,tsx}',\n    './app/**/*.{js,jsx,ts,tsx}',\n    './src/**/*.{js,jsx,ts,tsx}',\n  ],\n  prefix: '',\n  theme: {\n    container: {\n      center: true,\n      padding: '2rem',\n      screens: {\n        '2xl': '1400px',\n      },\n    },\n    extend: {\n      minHeight: {\n        body: 'var(--body-height)',\n        content: 'var(--content-height)',\n      },\n      height: {\n        header: 'var(--header-height)',\n        body: 'var(--body-height)',\n        content: 'var(--content-height)',\n      },\n      width: {\n        side: 'var(--sidebar-width)',\n        content: 'var(--content-width)',\n      },\n      padding: {\n        body: 'var(--body-padding)',\n        side: 'var(--sidebar-width)',\n      },\n      margin: {\n        side: 'var(--sidebar-width)',\n      },\n      colors: {\n        border: 'hsl(var(--border))',\n        input: 'hsl(var(--input))',\n        ring: 'hsl(var(--ring))',\n        background: 'hsl(var(--background))',\n        foreground: 'hsl(var(--foreground))',\n        primary: {\n          DEFAULT: 'hsl(var(--primary))',\n          foreground: 'hsl(var(--primary-foreground))',\n        },\n        warning: {\n          DEFAULT: 'hsl(var(--warning))',\n          foreground: 'hsl(var(--warning-foreground))',\n        },\n        info: {\n          DEFAULT: 'hsl(var(--info))',\n          foreground: 'hsl(var(--info-foreground))',\n        },\n        success: {\n          DEFAULT: 'hsl(var(--success))',\n          foreground: 'hsl(var(--success-foreground))',\n        },\n        secondary: {\n          DEFAULT: 'hsl(var(--secondary))',\n          foreground: 'hsl(var(--secondary-foreground))',\n        },\n        destructive: {\n          DEFAULT: 'hsl(var(--destructive))',\n          foreground: 'hsl(var(--destructive-foreground))',\n     
   },\n        muted: {\n          DEFAULT: 'hsl(var(--muted))',\n          foreground: 'hsl(var(--muted-foreground))',\n        },\n        accent: {\n          DEFAULT: 'hsl(var(--accent))',\n          foreground: 'hsl(var(--accent-foreground))',\n        },\n        popover: {\n          DEFAULT: 'hsl(var(--popover))',\n          foreground: 'hsl(var(--popover-foreground))',\n        },\n        card: {\n          DEFAULT: 'hsl(var(--card))',\n          foreground: 'hsl(var(--card-foreground))',\n        },\n        sidebar: {\n          DEFAULT: 'hsl(var(--sidebar-background))',\n          foreground: 'hsl(var(--sidebar-foreground))',\n          primary: 'hsl(var(--sidebar-primary))',\n          'primary-foreground': 'hsl(var(--sidebar-primary-foreground))',\n          accent: 'hsl(var(--sidebar-accent))',\n          'accent-foreground': 'hsl(var(--sidebar-accent-foreground))',\n          border: 'hsl(var(--sidebar-border))',\n          ring: 'hsl(var(--sidebar-ring))',\n        },\n      },\n      borderRadius: {\n        lg: 'var(--radius)',\n        md: 'calc(var(--radius) - 2px)',\n        sm: 'calc(var(--radius) - 4px)',\n      },\n      keyframes: {\n        'accordion-down': {\n          from: {\n            height: '0',\n          },\n          to: {\n            height: 'var(--radix-accordion-content-height)',\n          },\n        },\n        'accordion-up': {\n          from: {\n            height: 'var(--radix-accordion-content-height)',\n          },\n          to: {\n            height: '0',\n          },\n        },\n        'fade-in-right': {\n          from: {\n            transform: 'translate3d(30%, 0, 0)',\n            opacity: '0',\n          },\n          to: {\n            transform: 'translate3d(0, 0, 0)',\n            opacity: '1',\n          },\n        },\n        'fade-in-left': {\n          from: {\n            transform: 'translate3d(-30%, 0, 0)',\n            opacity: '0',\n          },\n          to: {\n            transform: 
'translate3d(0, 0, 0)',\n            opacity: '1',\n          },\n        },\n      },\n      animation: {\n        'accordion-down': 'accordion-down 0.2s ease-out',\n        'accordion-up': 'accordion-up 0.2s ease-out',\n        'fade-in-right': 'fade-in-right 0.5s ease-out',\n        'fade-in-left': 'fade-in-left 0.5s ease-out',\n      },\n      backgroundImage: {\n        'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))',\n        'gradient-conic': 'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))',\n      },\n    },\n  },\n  plugins: [\n    require('tailwindcss-animate'),\n    require('@tailwindcss/typography'),\n  ],\n};\n\nexport default config;\n"
  },
  {
    "path": "frontend/app/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"lib\": [\n      \"dom\",\n      \"dom.iterable\",\n      \"esnext\"\n    ],\n    \"allowJs\": true,\n    \"skipLibCheck\": true,\n    \"strict\": true,\n    \"noEmit\": true,\n    \"esModuleInterop\": true,\n    \"module\": \"esnext\",\n    \"moduleResolution\": \"bundler\",\n    \"resolveJsonModule\": true,\n    \"isolatedModules\": true,\n    \"jsx\": \"preserve\",\n    \"incremental\": true,\n    \"plugins\": [\n      {\n        \"name\": \"next\"\n      }\n    ],\n    \"paths\": {\n      \"@/*\": [\n        \"./src/*\"\n      ]\n    },\n    \"target\": \"ES2017\"\n  },\n  \"include\": [\n    \"next-env.d.ts\",\n    \"**/*.ts\",\n    \"**/*.tsx\",\n    \".next/types/**/*.ts\"\n  ],\n  \"exclude\": [\n    \"node_modules\"\n  ]\n}\n"
  },
  {
    "path": "frontend/package.json",
    "content": "{\n  \"name\": \"tidb-ai-parent\",\n  \"private\": true,\n  \"version\": \"0.0.0\",\n  \"license\": \"Apache-2.0\",\n  \"scripts\": {\n    \"test\": \"pnpm run --filter '*' test\",\n    \"dev\": \"pnpm run --filter 'app' dev\",\n    \"storybook\": \"pnpm run --filter 'app' storybook\",\n    \"build:widget-react\": \"pnpm run --filter '@tidb.ai/react' build\",\n    \"build:docker\": \"pnpm run --filter 'app' build:standalone\",\n    \"build\": \"pnpm run --filter 'app' build\",\n    \"verify\": \"pnpm run test && pnpm run build && pnpm run build:widget-react\"\n  },\n  \"packageManager\": \"pnpm@9.15.0\",\n  \"pnpm\": {\n    \"patchedDependencies\": {\n      \"jest-runtime@29.7.0\": \"patches/jest-runtime@29.7.0.patch\",\n      \"@jest/environment@29.7.0\": \"patches/@jest__environment@29.7.0.patch\"\n    },\n    \"overrides\": {\n      \"@types/react\": \"19.0.1\",\n      \"@types/react-dom\": \"19.0.2\"\n    }\n  }\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/.eslintrc.cjs",
    "content": "module.exports = {\n  root: true,\n  env: { browser: true, es2020: true },\n  extends: [\n    'eslint:recommended',\n    'plugin:@typescript-eslint/recommended',\n    'plugin:react-hooks/recommended',\n  ],\n  ignorePatterns: ['dist', '.eslintrc.cjs'],\n  parser: '@typescript-eslint/parser',\n  plugins: ['react-refresh'],\n  rules: {\n    'react-refresh/only-export-components': [\n      'warn',\n      { allowConstantExport: true },\n    ],\n  },\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/.gitignore",
    "content": "# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\nlerna-debug.log*\n\nnode_modules\ndist\ndist-ssr\n*.local\n\n# Editor directories and files\n.vscode/*\n!.vscode/extensions.json\n.idea\n.DS_Store\n*.suo\n*.ntvs*\n*.njsproj\n*.sln\n*.sw?\n\nstats.html\n"
  },
  {
    "path": "frontend/packages/widget-react/README.md",
    "content": "# React + TypeScript + Vite\n\nThis template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.\n\nCurrently, two official plugins are available:\n\n- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh\n- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh\n\n## Expanding the ESLint configuration\n\nIf you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:\n\n- Configure the top-level `parserOptions` property like this:\n\n```js\nexport default {\n  // other rules...\n  parserOptions: {\n    ecmaVersion: 'latest',\n    sourceType: 'module',\n    project: ['./tsconfig.json', './tsconfig.node.json'],\n    tsconfigRootDir: __dirname,\n  },\n}\n```\n\n- Replace `plugin:@typescript-eslint/recommended` with `plugin:@typescript-eslint/recommended-type-checked` or `plugin:@typescript-eslint/strict-type-checked`\n- Optionally add `plugin:@typescript-eslint/stylistic-type-checked`\n- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and add `plugin:react/recommended` & `plugin:react/jsx-runtime` to the `extends` list\n"
  },
  {
    "path": "frontend/packages/widget-react/USAGE.md",
    "content": "# Usage of tidb.ai widget\n\n## Floating Button\n\nTiDB.ai Widget will automatically create a floating button at the bottom right corner of your site.\n\n```html\n<script\n  async\n  src=\"{{YOUR_SITE}}/widget.js\"\n  data-api-base=\"{{YOUR_SITE}}\"\n>\n</script>\n```\n\n## Custom Trigger Element\n\nTiDB.ai Widget will listen to the element's `click` event. The custom trigger element must have the id `tidb-ai-trigger`.\n\n```html\n<button id=\"tidb-ai-trigger\">My Trigger</button>\n\n<!-- ... -->\n\n<script\n  async\n  src=\"{{YOUR_SITE}}/widget.js\"\n  data-api-base=\"{{YOUR_SITE}}\"\n>\n</script>\n```\n\n## Controlled\n\nTiDB.ai Widget will not create or bind a trigger element. Instead, you can listen to the custom event\n`tidbaiinitialized`, which indicates the `tidbai` object is ready on the `Window` object.\n\n```js\nwindow.addEventListener('tidbaiinitialized', () => {\n  window.tidbai.open = true;\n})\n```\n\nThe widget script tag must have a `data-controlled` attribute to prevent creating a trigger button.\n\n```html\n<script\n  async\n  src=\"{{YOUR_SITE}}/widget.js\"\n  data-api-base=\"{{YOUR_SITE}}\"\n  data-controlled=\"true\"\n>\n</script>\n```\n"
  },
  {
    "path": "frontend/packages/widget-react/index.html",
    "content": "<!doctype html>\n<html lang=\"en\">\n<head>\n  <meta charset=\"UTF-8\" />\n  <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n  <title>Vite + React + TS</title>\n  <style>\n    :root{\n      font-size: 88px;\n    }\n  </style>\n</head>\n<body>\nROOT FONT SIZE 88px\n<div id=\"tidb-ai-widget\" class=\"tidb-ai-widget\"></div>\n<script async src=\"https://www.googletagmanager.com/gtag/js?id=G-yyyyyyyy\"></script>\n<script>\n  window.dataLayer = window.dataLayer || [];\n\n  function gtag() {\n    dataLayer.push(arguments);\n  }\n\n  gtag('js', new Date());\n\n  gtag('config', 'G-yyyyyyyy');\n</script>\n<script>\n  window.addEventListener('tidbaiinitialized', ev => {\n    console.log(ev.detail)\n  })\n</script>\n<script async data-api-base=\"https://tidbai-dev.htapdb.com\" src=\"dist/widget.js\" data-chat-engine=\"default-copy\" data-measurement-id=\"G-ZZZZZZZZ\"></script>\n</body>\n</html>\n"
  },
  {
    "path": "frontend/packages/widget-react/package.json",
    "content": "{\n  \"name\": \"@tidb.ai/react\",\n  \"version\": \"0.0.1\",\n  \"type\": \"module\",\n  \"scripts\": {\n    \"dev\": \"vite\",\n    \"build\": \"vite build && cp dist/widget.js ../../app/public/\",\n    \"lint\": \"eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0\",\n    \"preview\": \"vite preview\"\n  },\n  \"dependencies\": {\n    \"react\": \"19.0.0\",\n    \"react-dom\": \"19.0.0\"\n  },\n  \"devDependencies\": {\n    \"@radix-ui/react-dialog\": \"^1.1.6\",\n    \"@tailwindcss/typography\": \"^0.5.13\",\n    \"@thedutchcoder/postcss-rem-to-px\": \"^0.0.2\",\n    \"@types/gtag.js\": \"^0.0.20\",\n    \"@types/react\": \"^19.0.1\",\n    \"@types/react-dom\": \"^19.0.2\",\n    \"@typescript-eslint/eslint-plugin\": \"^8.18.0\",\n    \"@typescript-eslint/parser\": \"^8.18.0\",\n    \"@vitejs/plugin-react-swc\": \"^3.7.2\",\n    \"autoprefixer\": \"^10.4.19\",\n    \"eslint\": \"^9.16.0\",\n    \"eslint-plugin-react-hooks\": \"^5.1.0\",\n    \"eslint-plugin-react-refresh\": \"^0.4.16\",\n    \"lucide-react\": \"^0.400.0\",\n    \"postcss\": \"^8\",\n    \"rollup-plugin-visualizer\": \"^5.12.0\",\n    \"sass\": \"^1.77.6\",\n    \"tailwindcss\": \"^3.4.15\",\n    \"tailwindcss-animate\": \"^1.0.7\",\n    \"tailwindcss-scoped-preflight\": \"^3.3.0\",\n    \"typescript\": \"^5.5.3\",\n    \"vite\": \"^6.0.3\",\n    \"vite-plugin-css-injected-by-js\": \"^3.5.1\"\n  }\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/postcss.config.js",
    "content": "export default {\n  plugins: {\n    tailwindcss: {},\n    '@thedutchcoder/postcss-rem-to-px': {},\n    autoprefixer: {},\n  },\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/Widget.css",
    "content": "@import \"../../../app/src/app/globals.css\";\n\n\n@layer base {\n  #tidb-ai-widget {\n    @apply bg-background text-foreground;\n  }\n}\n\n#tidb-ai-widget-trigger {\n  z-index: 9999;\n}\n\n#tidb-ai-widget {\n  font-size: 16px;\n  h1, h2, h3, h4, h5, h6, span, b, strong, em, i, code {\n    color: inherit;\n    background-color: inherit;\n  }\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/Widget.tsx",
    "content": "import type { BootstrapStatus } from '@/api/system';\nimport { ManualScrollVoter } from '@/components/auto-scroll';\nimport { AutoScroll } from '@/components/auto-scroll/auto-scroll';\nimport type { ChatController } from '@/components/chat/chat-controller';\nimport { ChatsProvider } from '@/components/chat/chat-hooks';\nimport { Conversation } from '@/components/chat/conversation';\nimport { useGtagFn } from '@/components/gtag-provider';\nimport { PortalProvider } from '@/components/portal-provider';\nimport { BootstrapStatusProvider } from '@/components/system/BootstrapStatusProvider';\nimport { Button } from '@/components/ui/button';\nimport { Dialog, DialogClose, DialogDescription, DialogHeader, DialogOverlay, DialogPortal, DialogTitle, DialogTrigger } from '@/components/ui/dialog';\nimport { ScrollArea } from '@/components/ui/scroll-area';\nimport { type ExperimentalFeatures, ExperimentalFeaturesProvider } from '@/experimental/experimental-features-provider';\nimport * as DialogPrimitive from '@radix-ui/react-dialog';\nimport { XIcon } from 'lucide-react';\nimport { forwardRef, useEffect, useImperativeHandle, useMemo, useRef, useState, useSyncExternalStore } from 'react';\nimport './Widget.css';\n\nexport interface WidgetProps {\n  trigger?: HTMLElement | true | null;\n  container: HTMLElement;\n  bootstrapStatus: BootstrapStatus;\n  experimentalFeatures: Partial<ExperimentalFeatures>;\n  exampleQuestions: string[];\n  buttonLabel: string;\n  buttonIcon: string;\n  icon: string;\n  disableAutoThemeDetect?: boolean;\n  chatEngine?: string;\n  apiBase?: string;\n  src: string;\n}\n\nexport interface WidgetInstance {\n  open: boolean;\n  dark: boolean;\n  initialized: true;\n}\n\nexport const Widget = forwardRef<WidgetInstance, WidgetProps>(({ container, trigger, experimentalFeatures, disableAutoThemeDetect = false, bootstrapStatus, exampleQuestions, icon, buttonIcon, buttonLabel, chatEngine, src }, ref) => {\n  const [open, setOpen] = 
useState(false);\n  const [dark, setDark] = useState(() => matchMedia('(prefers-color-scheme: dark)').matches);\n  const openRef = useRef(open);\n  const darkRef = useRef(dark);\n  const [scrollTarget, setScrollTarget] = useState<HTMLDivElement | null>(null);\n  const gtagFn = useGtagFn();\n  const shouldDisplayTrigger = useShouldDisplayTrigger(src);\n\n  useEffect(() => {\n    openRef.current = open;\n    darkRef.current = dark;\n  });\n\n  const toggleDark = (dark: boolean) => {\n    setDark(dark);\n  };\n\n  useEffect(() => {\n    if (disableAutoThemeDetect) {\n      return;\n    }\n    const match = matchMedia('(prefers-color-scheme: dark)');\n    const change = () => {\n      toggleDark(match.matches);\n    };\n    match.addEventListener('change', change);\n    change();\n    return () => {\n      match.removeEventListener('change', change);\n    };\n  }, [disableAutoThemeDetect]);\n\n  useEffect(() => {\n    if (disableAutoThemeDetect) {\n      return;\n    }\n    const mo = new MutationObserver(() => {\n      toggleDark(document.documentElement.classList.contains('dark'));\n    });\n    mo.observe(document.documentElement, {\n      attributes: true,\n      attributeFilter: ['class'],\n    });\n    toggleDark(document.documentElement.classList.contains('dark'));\n\n    return () => {\n      mo.disconnect();\n    };\n  }, [disableAutoThemeDetect]);\n\n  useEffect(() => {\n    if (dark) {\n      container.classList.add('dark');\n    } else {\n      container.classList.remove('dark');\n    }\n  }, [dark]);\n\n  useEffect(() => {\n    if (trigger && trigger !== true) {\n      const open = () => {\n        setOpen(true);\n        gtagFn('event', 'tidbai.events.open-widget-dialog');\n      };\n      trigger.addEventListener('click', open);\n      return () => {\n        trigger.removeEventListener('click', open);\n      };\n    }\n  }, [trigger]);\n\n  const newChatRef = useRef<ChatController['post'] | undefined>(undefined);\n\n  useImperativeHandle(ref, () => ({\n 
   get open () {\n      return openRef.current;\n    },\n    set open (o) {\n      if (o) {\n        gtagFn('event', 'tidbai.events.open-widget-dialog');\n      }\n      setOpen(o);\n    },\n    get dark () {\n      return darkRef.current;\n    },\n    set dark (d) {\n      setDark(d);\n    },\n    get initialized (): true { return true; },\n    newChat (content: string) {\n      newChatRef.current?.({\n        content,\n        chat_engine: chatEngine,\n      });\n    },\n  }), []);\n\n  return (\n    <PortalProvider container={container}>\n      <BootstrapStatusProvider bootstrapStatus={bootstrapStatus}>\n        <ExperimentalFeaturesProvider features={experimentalFeatures}>\n          <ChatsProvider>\n            <Dialog open={open} onOpenChange={(open) => {\n              setOpen(open);\n              if (!open) {\n                gtagFn('event', 'tidbai.events.close-widget-dialog');\n              }\n            }}>\n              {!trigger && shouldDisplayTrigger && <DialogTrigger asChild>\n                <Button id=\"tidb-ai-widget-trigger\" className=\"hidden sm:flex fixed right-8 bottom-8 gap-2 items-center\" onClick={() => {\n                  gtagFn('event', 'tidbai.events.open-widget-dialog');\n                }}>\n                  <img src={buttonIcon} alt=\"Logo\" className=\"size-4\" />\n                  <span>\n                  {buttonLabel}\n                </span>\n                </Button>\n              </DialogTrigger>}\n              <DialogPortal container={container}>\n                <DialogOverlay />\n                <DialogPrimitive.Content\n                  className=\"fixed left-[50%] top-[50%] z-50 grid translate-x-[-50%] translate-y-[-50%] gap-4 border bg-background p-0 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 
data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg w-[calc(100%-32px)] lg:w-[50vw] outline-none\">\n                  <DialogHeader className=\"relative p-8 pb-0\">\n                    <DialogClose className=\"absolute right-8 top-8 transition-opacity opacity-70 hover:opacity-100\">\n                      <XIcon className=\"size-4\" />\n                    </DialogClose>\n                    <DialogTitle className=\"flex items-center gap-4\">\n                      <img className=\"h-8\" src={icon} alt=\"logo\" height={32} />\n                      <span className=\"w-[1px] h-full py-2\">\n                        <span className=\"bg-border w-full h-full block\" />\n                      </span>\n                      <span>\n                        Ask AI\n                      </span>\n                    </DialogTitle>\n                    <DialogDescription className=\"sr-only\" />\n                  </DialogHeader>\n                  <AutoScroll target={scrollTarget} edgePixels={12}>\n                    <ManualScrollVoter />\n                    <ScrollArea viewportRef={setScrollTarget} className=\"relative h-[60vh] w-full\">\n                      <div className=\"w-[calc(100%-32px)] lg:w-[50vw]\">\n                        <Conversation\n                          className=\"w-full overflow-hidden md:max-w-[unset] md:min-h-[unset] md:p-0 [&>div>section]:pt-4 [&>div>section]:pb-0\"\n                          open\n                          chat={undefined}\n                          history={[]}\n                          placeholder={(myChat) => (\n                            <div className=\"mt-4 space-y-6 px-8\">\n                              <div className=\"font-medium text-lg\">Example questions:</div>\n                              <div className=\"flex gap-4 flex-wrap\">\n                                {exampleQuestions.map((question, index) => (\n              
                    <Button\n                                    key={index}\n                                    variant=\"secondary\"\n                                    disabled={!!myChat.postState.params}\n                                    onClick={() => myChat.post({ content: question, chat_engine: chatEngine })}>\n                                    {question}\n                                  </Button>\n                                ))}\n                              </div>\n                            </div>\n                          )}\n                          preventMutateBrowserHistory\n                          preventShiftMessageInput\n                          newChatRef={newChatRef}\n                        />\n                      </div>\n                    </ScrollArea>\n                    <div className=\"text-muted-foreground text-xs p-8 pt-0\">\n                      Powered by <a className=\"underline\" href=\"https://github.com/pingcap/autoflow\" target=\"_blank\">github.com/pingcap/autoflow</a>.\n                    </div>\n                  </AutoScroll>\n                </DialogPrimitive.Content>\n              </DialogPortal>\n            </Dialog>\n          </ChatsProvider>\n        </ExperimentalFeaturesProvider>\n      </BootstrapStatusProvider>\n    </PortalProvider>\n  );\n});\n\n// Listen the browser state change to determine when to display the default trigger button.\nconst __pushState = history.pushState;\nconst __replaceState = history.replaceState;\n\nhistory.replaceState = (...params) => {\n  window.dispatchEvent(new CustomEvent('tidbaihistorychange', { detail: { type: 'replaceState', params } }));\n  __replaceState.call(history, ...params);\n};\n\nhistory.pushState = (...params) => {\n  window.dispatchEvent(new CustomEvent('tidbaihistorychange', { detail: { type: 'pushState', params } }));\n  __pushState.call(history, ...params);\n};\n\nwindow.addEventListener('popstate', (e) => {\n  window.dispatchEvent(new 
CustomEvent('tidbaihistorychange', { detail: { type: 'popstate', params: [e.state] } }));\n});\n\nfunction useShouldDisplayTrigger (src: string) {\n  const pathname = useSyncExternalStore(fire => {\n    const callback = () => {\n      setTimeout(() => {\n        fire();\n      }, 1);\n    };\n\n    window.addEventListener('tidbaihistorychange', callback);\n    return () => {\n      window.removeEventListener('tidbaihistorychange', callback);\n    };\n  }, () => window.location.pathname);\n\n  return useMemo(() => {\n    // if src is relative, assume is on main site.\n    if (src.startsWith('/')) {\n      return pathname === '/';\n    }\n    const srcUrl = new URL(src);\n\n    // if page's origin is same with script src, assume is on main site.\n    if (location.origin === srcUrl.origin) {\n      return pathname === '/';\n    }\n    return true;\n  }, [pathname, src]);\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/index.css",
    "content": "@import \"../../../app/src/app/globals.css\";\n"
  },
  {
    "path": "frontend/packages/widget-react/src/library.tsx",
    "content": "import { GtagProvider } from '@/components/gtag-provider';\nimport ReactDOM from 'react-dom/client';\nimport { loadConfig } from './load-config';\nimport { prepareGtag } from './prepare-gtag';\nimport { Widget, type WidgetInstance } from './Widget';\n\nconst script = document.currentScript as HTMLScriptElement;\nif (!script) {\n  throw new Error('Cannot locate document.currentScript');\n}\n\nconst src = script.src;\nconst controlled = script.dataset.controlled === 'true';\nconst trigger = controlled ? true : document.getElementById('tidb-ai-trigger');\nconst chatEngine = script.dataset.chatEngine;\nconst scriptDataMeasurementId = script.dataset.measurementId;\n\nloadConfig().then(async ({ settings, bootstrapStatus, experimentalFeatures }) => {\n  const measurementId = scriptDataMeasurementId ?? settings.ga_id;\n\n  const gtagFn = measurementId ? prepareGtag(measurementId) : undefined;\n  const div = document.createElement('div');\n\n  div.id = 'tidb-ai-widget';\n  div.className = 'tidb-ai-widget';\n  document.body.appendChild(div);\n\n  const refFn = (current: WidgetInstance) => {\n    Object.defineProperty(window, 'tidbai', {\n      writable: false,\n      value: current,\n    });\n    window.dispatchEvent(new CustomEvent('tidbaiinitialized', { detail: current }));\n  };\n\n  ReactDOM.createRoot(div).render(\n    <GtagProvider configured gtagFn={gtagFn} gtagId={measurementId}>\n      <Widget\n        ref={refFn}\n        src={src}\n        container={div}\n        trigger={trigger}\n        exampleQuestions={settings.custom_js_example_questions}\n        buttonLabel={settings.custom_js_button_label}\n        buttonIcon={settings.custom_js_button_img_src}\n        icon={settings.custom_js_logo_src}\n        bootstrapStatus={bootstrapStatus}\n        experimentalFeatures={experimentalFeatures}\n        chatEngine={chatEngine}\n      />\n    </GtagProvider>,\n  );\n}).catch((error) => {\n  console.error('Failed to initialize tidbai', error);\n  
Object.defineProperty(window, 'tidbai', {\n    writable: false,\n    value: {\n      open: false,\n      dark: false,\n      initialized: false,\n      error,\n    },\n  });\n  window.dispatchEvent(new CustomEvent('tidbaierror', { detail: error }));\n});\n"
  },
  {
    "path": "frontend/packages/widget-react/src/load-config.ts",
    "content": "import { getPublicSiteSettings } from '@/api/site-settings';\nimport { getBootstrapStatus } from '@/api/system';\nimport type { ExperimentalFeatures } from '@/experimental/experimental-features-provider';\nimport { requestUrl } from '@/lib/request';\n\nexport async function loadConfig () {\n  const [settings, bootstrapStatus, experimentalFeatures] = await Promise.all([\n    getPublicSiteSettings().catch(error => {\n      console.error('Cannot initialize tidb.ai widget', error);\n      return Promise.reject(error);\n    }),\n    getBootstrapStatus().catch(error => {\n      console.error('TiDB.ai service not bootstrapped', error);\n      return Promise.reject(error);\n    }),\n    fetch(requestUrl(`/experimental-features`)).then(res => res.json() as Promise<Partial<ExperimentalFeatures>>),\n  ]);\n\n  if (!settings.enable_post_verifications || !settings.enable_post_verifications_for_widgets) {\n    experimentalFeatures.enable_message_post_verification = false;\n  }\n\n  return { settings, bootstrapStatus, experimentalFeatures };\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/overrides/README.md",
    "content": "The react widget reuses web app codebase. But some components may not buildable outside next env, this directory\nprovides some *polyfill* for some components.\n\nSee `vite.config.ts` for all overrides."
  },
  {
    "path": "frontend/packages/widget-react/src/overrides/components/code-theme.scss",
    "content": ".tidb-ai-widget#tidb-ai-widget {\n  pre code.hljs {\n    display: block;\n    overflow-x: auto;\n    padding: 1em\n  }\n\n  code.hljs {\n    padding: 3px 5px\n  }\n\n  /*!\n    Theme: GitHub\n    Description: Light theme as seen on github.com\n    Author: github.com\n    Maintainer: @Hirse\n    Updated: 2021-05-15\n\n    Outdated base version: https://github.com/primer/github-syntax-light\n    Current colors taken from GitHub's CSS\n  */\n  .hljs {\n    color: #24292e;\n    background: #ffffff\n  }\n\n  .hljs-doctag,\n  .hljs-keyword,\n  .hljs-meta .hljs-keyword,\n  .hljs-template-tag,\n  .hljs-template-variable,\n  .hljs-type,\n  .hljs-variable.language_ {\n    /* prettylights-syntax-keyword */\n    color: #d73a49\n  }\n\n  .hljs-title,\n  .hljs-title.class_,\n  .hljs-title.class_.inherited__,\n  .hljs-title.function_ {\n    /* prettylights-syntax-entity */\n    color: #6f42c1\n  }\n\n  .hljs-attr,\n  .hljs-attribute,\n  .hljs-literal,\n  .hljs-meta,\n  .hljs-number,\n  .hljs-operator,\n  .hljs-variable,\n  .hljs-selector-attr,\n  .hljs-selector-class,\n  .hljs-selector-id {\n    /* prettylights-syntax-constant */\n    color: #005cc5\n  }\n\n  .hljs-regexp,\n  .hljs-string,\n  .hljs-meta .hljs-string {\n    /* prettylights-syntax-string */\n    color: #032f62\n  }\n\n  .hljs-built_in,\n  .hljs-symbol {\n    /* prettylights-syntax-variable */\n    color: #e36209\n  }\n\n  .hljs-comment,\n  .hljs-code,\n  .hljs-formula {\n    /* prettylights-syntax-comment */\n    color: #6a737d\n  }\n\n  .hljs-name,\n  .hljs-quote,\n  .hljs-selector-tag,\n  .hljs-selector-pseudo {\n    /* prettylights-syntax-entity-tag */\n    color: #22863a\n  }\n\n  .hljs-subst {\n    /* prettylights-syntax-storage-modifier-import */\n    color: #24292e\n  }\n\n  .hljs-section {\n    /* prettylights-syntax-markup-heading */\n    color: #005cc5;\n    font-weight: bold\n  }\n\n  .hljs-bullet {\n    /* prettylights-syntax-markup-list */\n    color: #735c0f\n  }\n\n  .hljs-emphasis 
{\n    /* prettylights-syntax-markup-italic */\n    color: #24292e;\n    font-style: italic\n  }\n\n  .hljs-strong {\n    /* prettylights-syntax-markup-bold */\n    color: #24292e;\n    font-weight: bold\n  }\n\n  .hljs-addition {\n    /* prettylights-syntax-markup-inserted */\n    color: #22863a;\n    background-color: #f0fff4\n  }\n\n  .hljs-deletion {\n    /* prettylights-syntax-markup-deleted */\n    color: #b31d28;\n    background-color: #ffeef0\n  }\n\n  .hljs-char.escape_,\n  .hljs-link,\n  .hljs-params,\n  .hljs-property,\n  .hljs-punctuation,\n  .hljs-tag {\n    /* purposely ignored */\n\n  }\n\n  &.dark {\n    pre code.hljs {\n      display: block;\n      overflow-x: auto;\n      padding: 1em\n    }\n\n    code.hljs {\n      padding: 3px 5px\n    }\n\n    /*!\n      Theme: GitHub Dark\n      Description: Dark theme as seen on github.com\n      Author: github.com\n      Maintainer: @Hirse\n      Updated: 2021-05-15\n\n      Outdated base version: https://github.com/primer/github-syntax-dark\n      Current colors taken from GitHub's CSS\n    */\n    .hljs {\n      color: #c9d1d9;\n      background: #0d1117\n    }\n\n    .hljs-doctag,\n    .hljs-keyword,\n    .hljs-meta .hljs-keyword,\n    .hljs-template-tag,\n    .hljs-template-variable,\n    .hljs-type,\n    .hljs-variable.language_ {\n      /* prettylights-syntax-keyword */\n      color: #ff7b72\n    }\n\n    .hljs-title,\n    .hljs-title.class_,\n    .hljs-title.class_.inherited__,\n    .hljs-title.function_ {\n      /* prettylights-syntax-entity */\n      color: #d2a8ff\n    }\n\n    .hljs-attr,\n    .hljs-attribute,\n    .hljs-literal,\n    .hljs-meta,\n    .hljs-number,\n    .hljs-operator,\n    .hljs-variable,\n    .hljs-selector-attr,\n    .hljs-selector-class,\n    .hljs-selector-id {\n      /* prettylights-syntax-constant */\n      color: #79c0ff\n    }\n\n    .hljs-regexp,\n    .hljs-string,\n    .hljs-meta .hljs-string {\n      /* prettylights-syntax-string */\n      color: #a5d6ff\n    }\n\n    
.hljs-built_in,\n    .hljs-symbol {\n      /* prettylights-syntax-variable */\n      color: #ffa657\n    }\n\n    .hljs-comment,\n    .hljs-code,\n    .hljs-formula {\n      /* prettylights-syntax-comment */\n      color: #8b949e\n    }\n\n    .hljs-name,\n    .hljs-quote,\n    .hljs-selector-tag,\n    .hljs-selector-pseudo {\n      /* prettylights-syntax-entity-tag */\n      color: #7ee787\n    }\n\n    .hljs-subst {\n      /* prettylights-syntax-storage-modifier-import */\n      color: #c9d1d9\n    }\n\n    .hljs-section {\n      /* prettylights-syntax-markup-heading */\n      color: #1f6feb;\n      font-weight: bold\n    }\n\n    .hljs-bullet {\n      /* prettylights-syntax-markup-list */\n      color: #f2cc60\n    }\n\n    .hljs-emphasis {\n      /* prettylights-syntax-markup-italic */\n      color: #c9d1d9;\n      font-style: italic\n    }\n\n    .hljs-strong {\n      /* prettylights-syntax-markup-bold */\n      color: #c9d1d9;\n      font-weight: bold\n    }\n\n    .hljs-addition {\n      /* prettylights-syntax-markup-inserted */\n      color: #aff5b4;\n      background-color: #033a16\n    }\n\n    .hljs-deletion {\n      /* prettylights-syntax-markup-deleted */\n      color: #ffdcd7;\n      background-color: #67060c\n    }\n\n    .hljs-char.escape_,\n    .hljs-link,\n    .hljs-params,\n    .hljs-property,\n    .hljs-punctuation,\n    .hljs-tag {\n      /* purposely ignored */\n    }\n  }\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/overrides/components/remark-content/style.scss",
    "content": ".tidb-ai-widget#tidb-ai-widget {\n  .remark-content {\n    sup a[data-footnote-ref=true] {\n      @apply bg-primary text-primary-foreground px-1 mx-0.5 rounded-full no-underline;\n    }\n\n    sup a[data-footnote-ref=true] + a[data-footnote-ref=true] {\n      @apply ml-0.5;\n    }\n\n    [data-footnote-backref] {\n      @apply sr-only;\n    }\n\n    pre:has(> code) {\n      position: relative;\n\n      > div[data-role=\"codeblock-addon\"] {\n        @apply text-foreground;\n      }\n    }\n\n    pre:has(> code.hljs) {\n      padding: 0;\n    }\n\n    pre > code.hljs {\n      background-color: #fafafa;\n      color: #24292e;\n    }\n\n    pre:has(> code:not(.hljs)) {\n      background: #fafafa;\n\n      > code {\n        color: #24292e;\n      }\n    }\n  }\n\n  &.dark .remark-content {\n    pre > code.hljs {\n      background-color: #18181b;\n      color: #c9d1d9;\n    }\n\n\n    pre:has(> code:not(.hljs)) {\n      background: #18181b;\n\n      > code {\n        color: #c9d1d9;\n      }\n    }\n  }\n}"
  },
  {
    "path": "frontend/packages/widget-react/src/prepare-gtag.ts",
    "content": "import type { GtagFn } from '@/components/gtag-provider';\n\nconst DATA_LAYER_NAME = '__tidbai_dataLayer';\n\nexport function prepareGtag (id: string) {\n  const dataLayer = window[DATA_LAYER_NAME] = window[DATA_LAYER_NAME] || [];\n\n  // IMPORTANT: gtag must take an IArguments as parameter.\n  // eslint-disable-next-line @typescript-eslint/no-unused-vars\n  const fn = function (..._args: unknown[]) {\n    // eslint-disable-next-line prefer-rest-params\n    dataLayer.push(arguments);\n  };\n\n  fn('js', new Date());\n  fn('config', id, { send_page_view: false });\n\n  // To ensure not polluting the host gtag environments, we use an internal data layer variable in the widget project.\n  // https://developers.google.com/tag-platform/tag-manager/datalayer#rename_the_data_layer\n  const script = document.createElement('script');\n  script.src = `https://www.googletagmanager.com/gtag/js?id=${id}&l=${DATA_LAYER_NAME}`;\n  script.async = true;\n\n  document.body.append(script);\n\n  return fn as GtagFn;\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/src/vite-env.d.ts",
    "content": "/// <reference types=\"vite/client\" />\n"
  },
  {
    "path": "frontend/packages/widget-react/tailwind.config.ts",
    "content": "import type { Config } from 'tailwindcss';\nimport animate from 'tailwindcss-animate';\nimport typography from '@tailwindcss/typography';\nimport { scopedPreflightStyles, isolateInsideOfContainer } from 'tailwindcss-scoped-preflight';\n\nconst config: Config = {\n  darkMode: ['class'],\n  corePlugins: {\n    preflight: false,\n  },\n  important: '#tidb-ai-widget',\n  content: [\n    './src/**/*.{ts,tsx}',\n    '../../app/src/components/chat/**/*.{ts,tsx}',\n    '../../app/src/components/ui/**/*.{ts,tsx}',\n    '../../app/src/components/remark-content/**/*.{ts,tsx}',\n    '../../app/src/experimental/chat-verify-service/**/*.{ts,tsx}',\n  ],\n  prefix: '',\n  theme: {\n    container: {\n      center: true,\n      padding: '2rem',\n      screens: {\n        '2xl': '1400px',\n      },\n    },\n    extend: {\n      minHeight: {\n        body: 'var(--body-height)',\n        content: 'var(--content-height)',\n      },\n      height: {\n        header: 'var(--header-height)',\n        body: 'var(--body-height)',\n        content: 'var(--content-height)',\n      },\n      width: {\n        side: 'var(--sidebar-width)',\n        content: 'var(--content-width)',\n      },\n      padding: {\n        body: 'var(--body-padding)',\n        side: 'var(--sidebar-width)',\n      },\n      margin: {\n        side: 'var(--sidebar-width)',\n      },\n      colors: {\n        border: 'hsl(var(--border))',\n        input: 'hsl(var(--input))',\n        ring: 'hsl(var(--ring))',\n        background: 'hsl(var(--background))',\n        foreground: 'hsl(var(--foreground))',\n        primary: {\n          DEFAULT: 'hsl(var(--primary))',\n          foreground: 'hsl(var(--primary-foreground))',\n        },\n        secondary: {\n          DEFAULT: 'hsl(var(--secondary))',\n          foreground: 'hsl(var(--secondary-foreground))',\n        },\n        destructive: {\n          DEFAULT: 'hsl(var(--destructive))',\n          foreground: 'hsl(var(--destructive-foreground))',\n        
},\n        muted: {\n          DEFAULT: 'hsl(var(--muted))',\n          foreground: 'hsl(var(--muted-foreground))',\n        },\n        accent: {\n          DEFAULT: 'hsl(var(--accent))',\n          foreground: 'hsl(var(--accent-foreground))',\n        },\n        popover: {\n          DEFAULT: 'hsl(var(--popover))',\n          foreground: 'hsl(var(--popover-foreground))',\n        },\n        card: {\n          DEFAULT: 'hsl(var(--card))',\n          foreground: 'hsl(var(--card-foreground))',\n        },\n      },\n      borderRadius: {\n        lg: 'var(--radius)',\n        md: 'calc(var(--radius) - 2px)',\n        sm: 'calc(var(--radius) - 4px)',\n      },\n      keyframes: {\n        'accordion-down': {\n          from: { height: '0' },\n          to: { height: 'var(--radix-accordion-content-height)' },\n        },\n        'accordion-up': {\n          from: { height: 'var(--radix-accordion-content-height)' },\n          to: { height: '0' },\n        },\n        'fade-in-right': {\n          from: { transform: 'translate3d(30%, 0, 0)', opacity: '0' },\n          to: { transform: 'translate3d(0, 0, 0)', opacity: '1' },\n        },\n        'fade-in-left': {\n          from: { transform: 'translate3d(-30%, 0, 0)', opacity: '0' },\n          to: { transform: 'translate3d(0, 0, 0)', opacity: '1' },\n        },\n      },\n      animation: {\n        'accordion-down': 'accordion-down 0.2s ease-out',\n        'accordion-up': 'accordion-up 0.2s ease-out',\n        'fade-in-right': 'fade-in-right 0.5s ease-out',\n        'fade-in-left': 'fade-in-left 0.5s ease-out',\n      },\n    },\n  },\n  plugins: [\n    animate,\n    typography({ className: 'tidb-ai-widget#tidb-ai-widget .prose' }),\n    scopedPreflightStyles({\n      isolationStrategy: isolateInsideOfContainer('#tidb-ai-widget', {\n        except: '.no-twp', // optional, to exclude some elements under .twp from being preflighted, like external markup\n      }),\n    }),\n  ],\n};\n\nexport default config;\n"
  },
  {
    "path": "frontend/packages/widget-react/tsconfig.app.json",
    "content": "{\n  \"compilerOptions\": {\n    \"composite\": true,\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.app.tsbuildinfo\",\n    \"target\": \"ES2020\",\n    \"useDefineForClassFields\": true,\n    \"lib\": [\"ES2020\", \"DOM\", \"DOM.Iterable\"],\n    \"module\": \"ESNext\",\n    \"skipLibCheck\": true,\n\n    /* Bundler mode */\n    \"moduleResolution\": \"bundler\",\n    \"allowImportingTsExtensions\": false,\n    \"resolveJsonModule\": true,\n    \"isolatedModules\": true,\n    \"moduleDetection\": \"force\",\n    \"noEmit\": true,\n    \"jsx\": \"react-jsx\",\n\n    /* Linting */\n    \"strict\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"noFallthroughCasesInSwitch\": true,\n\n    \"rootDir\": \"../..\",\n    \"paths\": {\n      \"@/*\": [\"../../app/src/*\"]\n    }\n  },\n  \"include\": [\"src\", \"../../app\"]\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/tsconfig.json",
    "content": "{\n  \"files\": [],\n  \"references\": [\n    {\n      \"path\": \"./tsconfig.app.json\"\n    },\n    {\n      \"path\": \"./tsconfig.node.json\"\n    }\n  ]\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/tsconfig.node.json",
    "content": "{\n  \"compilerOptions\": {\n    \"composite\": true,\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.node.tsbuildinfo\",\n    \"skipLibCheck\": true,\n    \"module\": \"ESNext\",\n    \"moduleResolution\": \"bundler\",\n    \"allowSyntheticDefaultImports\": true,\n    \"strict\": true,\n    \"noEmit\": true\n  },\n  \"include\": [\"vite.config.ts\", \"tailwind.config.ts\"]\n}\n"
  },
  {
    "path": "frontend/packages/widget-react/vite.config.ts",
    "content": "import react from '@vitejs/plugin-react-swc';\nimport * as path from 'node:path';\nimport { visualizer } from 'rollup-plugin-visualizer';\nimport { defineConfig } from 'vite';\nimport injectCss from 'vite-plugin-css-injected-by-js';\n\nconst overriding = [\n  'components/remark-content/style.scss',\n  'components/code-theme/style.scss',\n];\n\nconst overridingPackages: string[] = [];\n\n// https://vitejs.dev/config/\nexport default defineConfig({\n  plugins: [\n    react(),\n    visualizer(),\n  ].concat(process.env.NODE_ENV === 'development' ? [] : [injectCss()]),\n  build: {\n    lib: {\n      entry: 'src/library.tsx',\n      formats: ['iife'],\n      name: '__this_name_should_never_exists_on_window__',\n      fileName: () => 'widget.js',\n    },\n    rollupOptions: {\n      output: {\n        inlineDynamicImports: true,\n      },\n    },\n  },\n  publicDir: '../../app/public',\n  resolve: {\n    conditions: ['tidbai-widget'],\n    alias: [\n      ...overriding.map(override => ({\n        find: path.join('@', override),\n        replacement: path.resolve(__dirname, `src/overrides`, override),\n      })),\n      ...overridingPackages.map(override => ({\n        find: override,\n        replacement: path.resolve(__dirname, `src/overrides`, override),\n      })),\n      {\n        find: '@',\n        replacement: path.resolve(__dirname, '../../app/src'),\n      },\n    ],\n  },\n  define: {\n    'process.env.BASE_URL': process.env.NODE_ENV === 'development' ? '\"https://tidbai-dev.htapdb.com\"' : '\"\"',\n    'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),\n    'process.env.NEXT_PUBLIC_DISABLE_DEBUG_PANEL': 'false',\n    'process.env.NEXT_PUBLIC_IS_WIDGET': 'true',\n    'process.env.__NEXT_ROUTER_BASEPATH': '\"\"',\n  },\n  server: {\n    port: 3000,\n    proxy: {\n      '/api': 'https://tidbai-dev.htapdb.com',\n    },\n  },\n});\n"
  },
  {
    "path": "frontend/patches/@jest__environment@29.7.0.patch",
    "content": "diff --git a/build/index.d.ts b/build/index.d.ts\nindex 1837e6ac01f7e6d73f7b82844b20e8c59f14fa77..9307322cb42d89dae39dec62a65b210c0a23df67 100644\n--- a/build/index.d.ts\n+++ b/build/index.d.ts\n@@ -191,6 +191,12 @@ export declare interface Jest {\n       virtual?: boolean;\n     },\n   ): Jest;\n+  /**\n+   * Mocks a module with the provided module factory when it is being imported.\n+   */\n+  unstable_unmockModule<T = unknown>(\n+    moduleName: string,\n+  ): Jest;\n   /**\n    * Wraps types of the `source` object and its deep members with type definitions\n    * of Jest mock function. Pass `{shallow: true}` option to disable the deeply\n"
  },
  {
    "path": "frontend/patches/jest-runtime@29.7.0.patch",
    "content": "diff --git a/build/index.js b/build/index.js\nindex 01cb4206525dab0e564e07366bb0de1cc68ae2d1..a3e97fedd7d591a6872ea2e34c994153157e4faf 100644\n--- a/build/index.js\n+++ b/build/index.js\n@@ -1943,6 +1943,16 @@ class Runtime {\n       this.setModuleMock(from, moduleName, mockFactory, options);\n       return jestObject;\n     };\n+    const unmockModule = (moduleName) => {\n+      const moduleID = this._resolver.getModuleID(\n+        this._virtualModuleMocks,\n+        from,\n+        moduleName,\n+        {conditions: this.esmConditions},\n+      );\n+      this._explicitShouldMockModule.set(moduleID, false);\n+      return jestObject;\n+    };\n     const clearAllMocks = () => {\n       this.clearAllMocks();\n       return jestObject;\n@@ -2165,6 +2175,7 @@ class Runtime {\n       spyOn,\n       unmock,\n       unstable_mockModule: mockModule,\n+      unstable_unmockModule: unmockModule,\n       useFakeTimers,\n       useRealTimers\n     };\n"
  },
  {
    "path": "frontend/pnpm-workspace.yaml",
    "content": "packages:\n  - app\n  - packages/*"
  }
]