[
  {
    "path": ".dockerignore",
    "content": "# Git\n.git/\n.gitignore\n.gitattributes\n\n# Python\n__pycache__/\n*.py[cod]\n*$py.class\n*.so\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n.pytest_cache/\n.coverage\n.coverage.*\n.cache\nhtmlcov/\n.tox/\n.nox/\n*.cover\n*.py,cover\n.hypothesis/\n.mypy_cache/\n.dmypy.json\ndmypy.json\n.pyre/\n.pytype/\n.ruff_cache/\n# uv.lock 需要保留（uv 构建需要）\n\n# Virtual environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n.envrc\n\n# Node.js / Frontend\n**/node_modules/\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\n.pnpm-store/\nlerna-debug.log*\n.npm\n.eslintcache\n.stylelintcache\n.node_repl_history\n*.tsbuildinfo\n.yarn/\n.pnp.*\n\n# Frontend build outputs (will be built in Docker)\nfront/dist/\nfront/build/\napp/static/assets/\napp/static/index.html\napp/static/vite.svg\n\n# IDEs and editors\n.vscode/\n.idea/\n*.swp\n*.swo\n*~\n.DS_Store\nThumbs.db\n*.sublime-project\n*.sublime-workspace\n.project\n.classpath\n.c9/\n*.launch\n.settings/\n*.iml\n.cursorignore\n.cursorindexingignore\n\n# OS files\n.DS_Store\n.DS_Store?\n._*\n.Spotlight-V100\n.Trashes\nehthumbs.db\nDesktop.ini\n\n# Documentation\ndocs/\n*.md\n!README.md\nLICENSE\n\n# Testing\ncoverage/\n.nyc_output/\ntest/\ntests/\n*.test.js\n*.spec.js\n__tests__/\n\n# Logs\n*.log\nlogs/\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\n\n# Temporary files\n*.tmp\n*.temp\n.tmp/\n.temp/\ntmp/\ntemp/\n\n# Local data (contains sensitive information)\ndata/\n/data/\n*.json\n!package.json\n!tsconfig*.json\n!components.json\n!app/locales/*.json\n\n# CI/CD\n.github/\n.gitlab-ci.yml\n.travis.yml\n.circleci/\nJenkinsfile\n\n# Docker files\nDockerfile*\ndocker-compose*.yml\n.dockerignore\n\n# Makefile and scripts\nMakefile\nscripts/\n\n# Python packaging files\nMANIFEST.in\nsetup.py\nsetup.cfg\n\n# Miscellaneous\n*.bak\n*.orig\n*.rej\n.cache/"
  },
  {
    "path": ".github/workflows/build-and-publish.yml",
    "content": "name: Build and Publish to PyPI\n\non:\n  release:\n    types: [published]\n  workflow_dispatch:\n    inputs:\n      publish_to_pypi:\n        description: \"Publish to PyPI\"\n        required: true\n        default: false\n        type: boolean\n      publish_to_test_pypi:\n        description: \"Publish to Test PyPI\"\n        required: true\n        default: false\n        type: boolean\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@v4\n        with:\n          submodules: recursive\n\n      - name: Set up Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.11\"\n\n      - name: Set up Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version: \"20\"\n\n      - name: Install pnpm\n        uses: pnpm/action-setup@v4\n        with:\n          version: 9\n\n      - name: Install Python dependencies\n        run: |\n          python -m pip install --upgrade pip\n          pip install build wheel\n\n      - name: Build frontend and wheel\n        run: |\n          python scripts/build_wheel.py\n\n      - name: Store the distribution packages\n        uses: actions/upload-artifact@v4\n        with:\n          name: python-package-distributions\n          path: dist/\n\n  publish-to-pypi:\n    name: Publish to PyPI\n    if: github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish_to_pypi == 'true')\n    needs:\n      - build\n    runs-on: ubuntu-latest\n\n    environment:\n      name: pypi\n      url: https://pypi.org/p/clove-proxy\n\n    permissions:\n      id-token: write\n\n    steps:\n      - name: Download all the dists\n        uses: actions/download-artifact@v4\n        with:\n          name: python-package-distributions\n          path: dist/\n\n      - name: Publish distribution to PyPI\n        uses: pypa/gh-action-pypi-publish@release/v1\n        with:\n          password: ${{ 
secrets.PYPI_API_TOKEN }}\n\n  publish-to-testpypi:\n    name: Publish to TestPyPI\n    if: github.event_name == 'workflow_dispatch' && github.event.inputs.publish_to_test_pypi == 'true'\n    needs:\n      - build\n    runs-on: ubuntu-latest\n\n    environment:\n      name: testpypi\n      url: https://test.pypi.org/p/clove-proxy\n\n    permissions:\n      id-token: write\n\n    steps:\n      - name: Download all the dists\n        uses: actions/download-artifact@v4\n        with:\n          name: python-package-distributions\n          path: dist/\n\n      - name: Publish distribution to TestPyPI\n        uses: pypa/gh-action-pypi-publish@release/v1\n        with:\n          repository-url: https://test.pypi.org/legacy/\n          password: ${{ secrets.TEST_PYPI_API_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/docker-publish.yml",
    "content": "name: Docker Build and Push\n\non:\n  push:\n    branches:\n      - main\n    tags:\n      - \"v*\"\n  pull_request:\n    branches:\n      - main\n  workflow_dispatch:\n\nenv:\n  DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}\n  DOCKER_HUB_TOKEN: ${{ secrets.DOCKER_HUB_TOKEN }}\n  IMAGE_NAME: mirrorange/clove\n\njobs:\n  build-and-push:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n      packages: write\n      security-events: write\n\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@v4\n        with:\n          submodules: recursive\n\n      - name: Set up QEMU\n        uses: docker/setup-qemu-action@v3\n\n      - name: Set up Docker Buildx\n        uses: docker/setup-buildx-action@v3\n\n      - name: Log in to Docker Hub\n        if: github.event_name != 'pull_request'\n        uses: docker/login-action@v3\n        with:\n          username: ${{ env.DOCKER_HUB_USERNAME }}\n          password: ${{ env.DOCKER_HUB_TOKEN }}\n\n      - name: Extract metadata\n        id: meta\n        uses: docker/metadata-action@v5\n        with:\n          images: ${{ env.IMAGE_NAME }}\n          tags: |\n            type=ref,event=branch\n            type=ref,event=pr\n            type=semver,pattern={{version}}\n            type=semver,pattern={{major}}.{{minor}}\n            type=semver,pattern={{major}}\n            type=raw,value=latest,enable={{is_default_branch}}\n\n      - name: Build and push Docker image\n        uses: docker/build-push-action@v5\n        with:\n          context: .\n          platforms: linux/amd64,linux/arm64\n          push: ${{ github.event_name != 'pull_request' }}\n          tags: ${{ steps.meta.outputs.tags }}\n          labels: ${{ steps.meta.outputs.labels }}\n          cache-from: type=gha\n          cache-to: type=gha,mode=max\n\n      - name: Run security scan\n        if: github.event_name != 'pull_request'\n        uses: aquasecurity/trivy-action@master\n        
with:\n          image-ref: ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}\n          format: \"sarif\"\n          output: \"trivy-results.sarif\"\n\n      - name: Upload Trivy scan results to GitHub Security tab\n        if: github.event_name != 'pull_request'\n        uses: github/codeql-action/upload-sarif@v3\n        with:\n          sarif_file: \"trivy-results.sarif\"\n"
  },
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[codz]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py.cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n#   For a library or package, you might want to ignore these files since the code is\n#   intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# UV\n#   Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.\n#   This is especially recommended for binary packages to ensure reproducibility, and is more\n#   commonly ignored for libraries.\n#uv.lock\n\n# poetry\n#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in 
version control.\n#   This is especially recommended for binary packages to ensure reproducibility, and is more\n#   commonly ignored for libraries.\n#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control\n#poetry.lock\n#poetry.toml\n\n# pdm\n#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.\n#   pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.\n#   https://pdm-project.org/en/latest/usage/project/#working-with-version-control\n#pdm.lock\n#pdm.toml\n.pdm-python\n.pdm-build/\n\n# pixi\n#   Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.\n#pixi.lock\n#   Pixi creates a virtual environment in the .pixi directory, just like venv module creates one\n#   in the .venv directory. It is recommended not to include this directory in version control.\n.pixi\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.envrc\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n\n# PyCharm\n#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can\n#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore\n#  and can be added to the global gitignore or merged into this file.  
For a more nuclear\n#  option (not recommended) you can uncomment the following to ignore the entire idea folder.\n#.idea/\n\n# Abstra\n# Abstra is an AI-powered process automation framework.\n# Ignore directories containing user credentials, local state, and settings.\n# Learn more at https://abstra.io/docs\n.abstra/\n\n# Visual Studio Code\n#  Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore \n#  that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore\n#  and can be added to the global gitignore or merged into this file. However, if you prefer, \n#  you could uncomment the following to ignore the entire vscode folder\n# .vscode/\n\n# Ruff stuff:\n.ruff_cache/\n\n# PyPI configuration file\n.pypirc\n\n# Cursor\n#  Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to\n#  exclude from AI features like autocomplete and code analysis. Recommended for sensitive data\n#  refer to https://docs.cursor.com/context/ignore-files\n.cursorignore\n.cursorindexingignore\n\n# Marimo\nmarimo/_static/\nmarimo/_lsp/\n__marimo__/\n\n# Data\n/data/\n\n# Built frontend static files\napp/static/"
  },
  {
    "path": ".gitmodules",
    "content": "[submodule \"front\"]\n\tpath = front\n\turl = https://github.com/mirrorange/clove-front.git\n"
  },
  {
    "path": ".python-version",
    "content": "3.13\n"
  },
  {
    "path": "Dockerfile",
    "content": "# Multi-stage Dockerfile for Clove (uv version)\n\n# =============================================================================\n# Stage 1: Build frontend\n# =============================================================================\nFROM node:20-alpine AS frontend-builder\n\n# Install pnpm\nRUN corepack enable && corepack prepare pnpm@latest --activate\n\nWORKDIR /app/front\n\n# Copy frontend package files\nCOPY front/package.json front/pnpm-lock.yaml ./\n\n# Install dependencies\nRUN pnpm install --frozen-lockfile\n\n# Copy frontend source\nCOPY front/ ./\n\n# Build frontend\nRUN pnpm run build\n\n# =============================================================================\n# Stage 2: Build Python application with uv\n# =============================================================================\nFROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS app\n\n# uv optimization environment variables\nENV UV_COMPILE_BYTECODE=1 \\\n    UV_LINK_MODE=copy \\\n    UV_PYTHON_DOWNLOADS=0\n\nWORKDIR /app\n\n# Step 1: Copy dependency files only (leverage Docker layer caching)\nCOPY pyproject.toml uv.lock ./\n\n# Install dependencies (without installing the project itself)\n# --locked: Use lockfile for consistency\n# --no-install-project: Only install dependencies, not the project\n# --no-dev: Skip dev dependencies\n# --extra rnet --extra curl: Install optional dependency groups\nRUN --mount=type=cache,target=/root/.cache/uv \\\n    uv sync --locked --no-install-project --no-dev --extra rnet --extra curl\n\n# Step 2: Copy application code and README.md (required by pyproject.toml)\nCOPY app/ ./app/\nCOPY README.md ./\n\n# Step 3: Copy frontend build artifacts (required by pyproject.toml force-include)\nCOPY --from=frontend-builder /app/front/dist ./app/static\n\n# Step 4: Install the project itself\nRUN --mount=type=cache,target=/root/.cache/uv \\\n    uv sync --locked --no-dev --extra rnet --extra curl\n\n# Create data directory\nRUN mkdir -p 
/data\n\n# Activate virtual environment (add .venv/bin to PATH)\nENV PATH=\"/app/.venv/bin:$PATH\"\n\n# Environment variables\nENV DATA_FOLDER=/data \\\n    HOST=0.0.0.0 \\\n    PORT=5201\n\n# Expose port\nEXPOSE 5201\n\n# Reset ENTRYPOINT (uv image default is uv)\nENTRYPOINT []\n\n# Run the application\nCMD [\"python\", \"-m\", \"app.main\"]\n"
  },
  {
    "path": "Dockerfile.huggingface",
    "content": "# Simplified Dockerfile for Clove - For Huggingface Spaces\nFROM python:3.11-slim\n\nWORKDIR /app\n\n# Install clove-proxy from PyPI\nRUN pip install --no-cache-dir \"clove-proxy[rnet]\"\n\n# Environment variables\nENV NO_FILESYSTEM_MODE=true\nENV HOST=0.0.0.0\nENV PORT=${PORT:-7860}\n\n# Expose port\nEXPOSE ${PORT:-7860}\n\n# Run the application using the installed script\nCMD [\"clove\"]\n"
  },
  {
    "path": "Dockerfile.pypi",
    "content": "# Simplified Dockerfile for Clove - Install from PyPI\nFROM python:3.11-slim\n\nWORKDIR /app\n\n# Install clove-proxy from PyPI\nRUN pip install --no-cache-dir \"clove-proxy[rnet]\"\n\n# Create data directory\nRUN mkdir -p /data\n\n# Environment variables\nENV DATA_FOLDER=/data\nENV HOST=0.0.0.0\nENV PORT=${PORT:-5201}\n\n# Expose port\nEXPOSE ${PORT:-5201}\n\n# Run the application using the installed script\nCMD [\"clove\"]\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2025 orange\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "# Include all static files\nrecursive-include app/static *\n\n# Include locale files\nrecursive-include app/locales *.json\n\n# Include documentation\ninclude README.md\ninclude LICENSE\n\n# Exclude development files\nrecursive-exclude * __pycache__\nrecursive-exclude * *.py[co]\nrecursive-exclude * .DS_Store\nglobal-exclude *.log\nglobal-exclude *.tmp\nglobal-exclude .git*\n\n# Exclude test files\nrecursive-exclude tests *\nrecursive-exclude * test_*\n\n# Exclude frontend source\nrecursive-exclude front *\n\n# Exclude data directory\nrecursive-exclude data *\n"
  },
  {
    "path": "Makefile",
    "content": ".PHONY: help build build-frontend build-wheel install install-dev clean run test\n\n# Default target\nhelp:\n\t@echo \"Available commands:\"\n\t@echo \"  make build          - Build frontend and create Python wheel\"\n\t@echo \"  make build-frontend - Build only the frontend\"\n\t@echo \"  make build-wheel    - Build only the Python wheel\"\n\t@echo \"  make install        - Build and install the package\"\n\t@echo \"  make install-dev    - Install in development mode\"\n\t@echo \"  make clean          - Clean build artifacts\"\n\t@echo \"  make run            - Run the application (development)\"\n\t@echo \"  make test           - Run tests\"\n\n# Build everything\nbuild:\n\t@python scripts/build_wheel.py\n\n# Build only frontend\nbuild-frontend:\n\t@cd front && pnpm install && pnpm run build\n\t@rm -rf app/static\n\t@cp -r front/dist app/static\n\t@echo \"✓ Frontend build complete\"\n\n# Build only wheel\nbuild-wheel:\n\t@python scripts/build_wheel.py --skip-frontend\n\n# Build and install\ninstall: build\n\t@pip install dist/*.whl\n\t@echo \"✓ Clove installed successfully\"\n\t@echo \"Run 'clove' to start the application\"\n\n# Install in development mode\ninstall-dev:\n\t@pip install -e .\n\t@echo \"✓ Clove installed in development mode\"\n\n# Clean build artifacts\nclean:\n\t@rm -rf dist build *.egg-info\n\t@rm -rf app/__pycache__ app/**/__pycache__\n\t@rm -rf .pytest_cache .ruff_cache\n\t@find . -type f -name \"*.pyc\" -delete\n\t@find . -type f -name \"*.pyo\" -delete\n\t@echo \"✓ Cleaned build artifacts\"\n\n# Run the application (development mode)\nrun:\n\t@python -m app.main"
  },
  {
    "path": "README.md",
    "content": "# Clove 🍀\n\n<div align=\"center\">\n\n[![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)\n[![Python](https://img.shields.io/badge/python-3.13+-blue.svg)](https://www.python.org/downloads/)\n[![FastAPI](https://img.shields.io/badge/FastAPI-0.115+-green.svg)](https://fastapi.tiangolo.com)\n\n**全力以赴的 Claude 反向代理 ✨**\n\n[English](./README_en.md) | [简体中文](#)\n\n</div>\n\n## 🌟 这是什么？\n\nClove 是一个让你能够通过标准 Claude API 访问 Claude.ai 的反向代理工具。简单来说，它让各种 AI 应用都能连接上 Claude！\n\n**最大亮点**：Clove 是首个支持通过 OAuth 认证访问 Claude 官方 API 的反向代理（就是 Claude Code 用的那个）！这意味着你能享受到完整的 Claude API 功能，包括原生系统消息和预填充等高级特性。\n\n## 🚀 快速开始\n\n只需要三步，就能开始使用：\n\n### 1. 安装 Python\n\n确保你的电脑上有 Python 3.13 或更高版本\n\n### 2. 安装 Clove\n\n```bash\npip install \"clove-proxy[rnet]\"\n```\n\n### 3. 启动！\n\n```bash\nclove\n```\n\n启动后会在控制台显示一个随机生成的临时管理密钥。登录管理页面后别忘了添加自己的密钥哦！\n\n### 4. 配置账户\n\n打开浏览器访问：http://localhost:5201\n\n使用刚才的管理密钥登录，然后就可以添加你的 Claude 账户了～\n\n## ✨ 核心功能\n\n### 🔐 双模式运行\n\n- **OAuth 模式**：优先使用，可以访问 Claude API 的全部功能\n- **网页反代模式**：当 OAuth 不可用时自动切换，通过模拟 Claude.ai 网页版实现\n\n### 🎯 超高兼容性\n\n与其他反代工具（如 Clewd）相比，Clove 的兼容性非常出色：\n\n- ✅ 完全支持 SillyTavern\n- ✅ 支持绝大部分使用 Claude API 的应用\n- ✅ 甚至支持 Claude Code 本身！\n\n### 🛠️ 功能增强\n\n#### 对于 OAuth 模式\n\n- 完全访问 Claude API 的全部功能\n- 支持原生系统消息\n- 支持预填充功能\n- 性能更好，更稳定\n\n#### 对于 Claude.ai 网页反代模式\n\nClove 处理了 Claude.ai 网页版与 API 的各种差异：\n\n- 图片上传支持\n- 扩展思考（思维链）支持\n\n即使是通过网页反代，Clove 也能让你使用原本不支持的功能：\n\n- 工具调用（Function Calling）\n- 停止序列（Stop Sequences）\n- Token 计数（估算值）\n- 非流式传输\n\nClove 尽可能让 Claude.ai 网页反代更接近 API，以期在所有应用程序中获得无缝体验。\n\n### 🎨 友好的管理界面\n\n- 现代化的 Web 管理界面\n- 无需编辑配置文件\n- 所有设置都能在管理页面上完成\n- 自动管理用户配额和状态\n\n### 🔄 智能功能\n\n- **自动 OAuth 认证**：通过 Cookie 自动完成，无需手动登录 Claude Code\n- **智能切换**：自动在 OAuth 和 Claude.ai 网页反代之间切换\n- **配额管理**：超出配额时自动标记并在重置时恢复\n\n## ⚠️ 局限性\n\n### 1. 
Android Termux 用户注意\n\nClove 依赖 `curl_cffi` 来请求 claude.ai，但这个依赖无法在 Termux 上运行。\n\n**解决方案**：\n\n- 使用不含 curl_cffi 的版本：`pip install clove-proxy`\n  - ✅ 通过 OAuth 访问 Claude API（需要在管理页面手动完成认证）\n  - ❌ 无法使用网页反代功能\n  - ❌ 无法自动完成 OAuth 认证\n- 使用反向代理/镜像（如 fuclaude）\n  - ✅ 可以使用全部功能\n  - ❌ 需要额外的服务器（既然有搭建镜像的服务器，为什么要在 Termux 上部署呢 www）\n\n### 2. 工具调用限制\n\n如果你使用网页反代模式，避免接入会**大量并行执行工具调用**的应用。\n\n- Clove 需要保持与 Claude.ai 的连接等待工具调用结果\n- 过多并行调用会耗尽连接导致失败\n- OAuth 模式不受此限制\n\n### 3. 提示结构限制\n\n当 Clove 使用网页反代时，Claude.ai 会在提示中添加额外的系统提示词和文件上传结构。当使用对结构要求高的提示词（如 RP 预设）时：\n\n- 你可以预估请求将通过何种方式进行。在默认配置下：\n  - 使用 Free 账户时，所有请求通过 Claude.ai 网页反代\n  - 使用 Pro/Max 账户时，所有请求通过 Claude API 进行\n  - 若存在多账户，Clove 始终优先使用可访问该模型 API 的账户\n- 请选择与请求方式兼容的提示词\n\n## 🔧 高级配置\n\n### 环境变量\n\n虽然大部分配置都能在管理界面完成，但你也可以通过环境变量进行设置：\n\n```bash\n# 端口配置\nPORT=5201\n\n# 管理密钥（不设置则自动生成）\nADMIN_API_KEYS==your-secret-key\n\n# Claude.ai Cookie\nCOOKIES=sessionKey=your-session-key\n```\n\n更多配置请见 `.env.example` 文件。\n\n### API 使用\n\n配置完成后，你可以像使用标准 Claude API 一样使用 Clove：\n\n```python\nimport anthropic\n\nclient = anthropic.Anthropic(\n    base_url=\"http://localhost:5201\",\n    api_key=\"your-api-key\"  # 在管理界面创建\n)\n\nresponse = client.messages.create(\n    model=\"claude-opus-4-20250514\",\n    messages=[{\"role\": \"user\", \"content\": \"Hello, Claude!\"}],\n    max_tokens=1024,\n)\n```\n\n## 🤝 贡献\n\n欢迎贡献代码！如果你有好的想法或发现了问题：\n\n1. Fork 这个项目\n2. 创建你的功能分支 (`git checkout -b feature/AmazingFeature`)\n3. 提交你的修改 (`git commit -m 'Add some AmazingFeature'`)\n4. 推送到分支 (`git push origin feature/AmazingFeature`)\n5. 
开一个 Pull Request\n\n## 📄 许可证\n\n本项目采用 MIT 许可证 - 查看 [LICENSE](LICENSE) 文件了解详情。\n\n## 🙏 致谢\n\n- [Anthropic Claude](https://www.anthropic.com/claude) - ~~可爱的小克~~ 强大的 AI 助手\n- [Clewd](https://github.com/teralomaniac/clewd/) - 初代 Claude.ai 反向代理\n- [ClewdR](https://github.com/Xerxes-2/clewdr) - 高性能 Claude.ai 反向代理\n- [FastAPI](https://fastapi.tiangolo.com/) - 现代、快速的 Web 框架\n- [Tailwind CSS](https://tailwindcss.com/) - CSS 框架\n- [Shadcn UI](https://ui.shadcn.com/) - 现代化的 UI 组件库\n- [Vite](https://vitejs.dev/) - 现代化的前端构建工具\n- [React](https://reactjs.org/) - JavaScript 库\n\n## ⚠️ 免责声明\n\n本项目仅供学习和研究使用。使用本项目时，请遵守相关服务的使用条款。作者不对任何滥用或违反服务条款的行为负责。\n\n## 📮 联系方式\n\n如有问题或建议，欢迎通过以下方式联系：\n\n- 提交 [Issue](https://github.com/mirrorange/clove/issues)\n- 发送 Pull Request\n- 发送邮件至：orange@freesia.ink\n\n## 🌸 关于 Clove\n\n丁香，桃金娘科蒲桃属植物，是一种常见的香料，也可用作中药。丁香（Clove）与丁香花（Syringa）是两种不同的植物哦~在本项目中，Clove 更接近 Claude 和 love 的合成词呢！\n\n---\n\n<div align=\"center\">\nMade with ❤️ by 🍊\n</div>\n"
  },
  {
    "path": "README_en.md",
    "content": "# Clove 🍀\n\n<div align=\"center\">\n\n[![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)\n[![Python](https://img.shields.io/badge/python-3.13+-blue.svg)](https://www.python.org/downloads/)\n[![FastAPI](https://img.shields.io/badge/FastAPI-0.115+-green.svg)](https://fastapi.tiangolo.com)\n\n**The all-in-one Claude reverse proxy ✨**\n\n[English](#) | [简体中文](./README.md)\n\n</div>\n\n## 🌟 What is this?\n\nClove is a reverse proxy tool that lets you access Claude.ai through a standard API. In simple terms, it allows various AI applications to connect to Claude!\n\n**The biggest highlight**: Clove is the first reverse proxy to support accessing Claude's official API through OAuth authentication (the same one Claude Code uses)! This means you get the full Claude API experience, including advanced features like native system messages and prefilling.\n\n## 🚀 Quick Start\n\nJust three steps to get started:\n\n### 1. Install Python\n\nMake sure you have Python 3.13 or higher on your computer\n\n### 2. Install Clove\n\n```bash\npip install \"clove-proxy[rnet]\"\n```\n\n### 3. Launch!\n\n```bash\nclove\n```\n\nAfter starting, you'll see a randomly generated temporary admin key in the console. Don't forget to add your own key after logging into the admin panel!\n\n### 4. 
Configure Your Account\n\nOpen your browser and go to: http://localhost:5201\n\nLog in with the admin key from earlier, then you can add your Claude account~\n\n## ✨ Core Features\n\n### 🔐 Dual Mode Operation\n\n- **OAuth Mode**: Preferred method, gives you access to all Claude API features\n- **Web Proxy Mode**: Automatically switches when OAuth is unavailable, works by emulating the Claude.ai web interface\n\n### 🎯 Outstanding Compatibility\n\nCompared to other proxy tools (like Clewd), Clove offers exceptional compatibility:\n\n- ✅ Full support for SillyTavern\n- ✅ Works with most applications that use the Claude API\n- ✅ Even supports Claude Code itself!\n\n### 🛠️ Enhanced Features\n\n#### For OAuth Mode\n\n- Complete access to all Claude API features\n- Native system message support\n- Prefilling support\n- Better performance and stability\n\n#### For Claude.ai Web Proxy Mode\n\nClove handles all the differences between Claude.ai web version and the API:\n\n- Image upload support\n- Extended thinking (chain of thought) support\n\nEven through web proxy, Clove enables features that weren't originally supported:\n\n- Function Calling\n- Stop Sequences\n- Token counting (estimated)\n- Non-streaming responses\n\nClove strives to make the Claude.ai web proxy as API-like as possible for a seamless experience across all applications.\n\n### 🎨 Friendly Admin Interface\n\n- Modern web management interface\n- No need to edit config files\n- All settings can be configured in the admin panel\n- Automatic user quota and status management\n\n### 🔄 Smart Features\n\n- **Automatic OAuth Authentication**: Completed automatically through cookies, no manual Claude Code login needed\n- **Intelligent Switching**: Automatically switches between OAuth and Claude.ai web proxy\n- **Quota Management**: Automatically flags when quota is exceeded and restores when reset\n\n## ⚠️ Limitations\n\n### 1. 
Android Termux Users Note\n\nClove depends on `curl_cffi` to request claude.ai, but this dependency doesn't work on Termux.\n\n**Solutions**:\n\n- Use the version without curl_cffi: `pip install clove-proxy`\n  - ✅ Access Claude API through OAuth (requires manual authentication in admin panel)\n  - ❌ Cannot use web proxy features\n  - ❌ Cannot auto-complete OAuth authentication\n- Use a reverse proxy/mirror (like fuclaude)\n  - ✅ Can use all features\n  - ❌ Requires an additional server (but if you have a server for mirroring, why deploy on Termux? lol)\n\n### 2. Tool Calling Limitations\n\nIf you're using web proxy mode, avoid connecting applications that perform **many parallel tool calls**.\n\n- Clove needs to maintain connections with Claude.ai while waiting for tool call results\n- Too many parallel calls will exhaust connections and cause failures\n- OAuth mode is not affected by this limitation\n\n### 3. Prompt Structure Limitations\n\nWhen Clove uses web proxy, Claude.ai adds extra system prompts and file upload structures to your prompts. When using prompts with strict structural requirements (like RP presets):\n\n- You can predict which method your request will use. 
With default settings:\n  - Free accounts: All requests go through Claude.ai web proxy\n  - Pro/Max accounts: All requests use Claude API\n  - With multiple accounts, Clove always prioritizes accounts with API access for the requested model\n- Choose prompts compatible with your request method\n\n## 🔧 Advanced Configuration\n\n### Environment Variables\n\nWhile most settings can be configured in the admin interface, you can also use environment variables:\n\n```bash\n# Port configuration\nPORT=5201\n\n# Admin key (auto-generated if not set)\nADMIN_API_KEYS=your-secret-key\n\n# Claude.ai Cookie\nCOOKIES=sessionKey=your-session-key\n```\n\nSee `.env.example` for more configuration options.\n\n### API Usage\n\nOnce configured, you can use Clove just like the standard Claude API:\n\n```python\nimport anthropic\n\nclient = anthropic.Anthropic(\n    base_url=\"http://localhost:5201\",\n    api_key=\"your-api-key\"  # Create this in the admin panel\n)\n\nresponse = client.messages.create(\n    model=\"claude-opus-4-20250514\",\n    messages=[{\"role\": \"user\", \"content\": \"Hello, Claude!\"}],\n    max_tokens=1024,\n)\n```\n\n## 🤝 Contributing\n\nContributions are welcome! If you have great ideas or found issues:\n\n1. Fork this project\n2. Create your feature branch (`git checkout -b feature/AmazingFeature`)\n3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)\n4. Push to the branch (`git push origin feature/AmazingFeature`)\n5. 
Open a Pull Request\n\n## 📄 License\n\nThis project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.\n\n## 🙏 Acknowledgments\n\n- [Anthropic Claude](https://www.anthropic.com/claude) - ~~Adorable little Claude~~ Powerful AI assistant\n- [Clewd](https://github.com/teralomaniac/clewd/) - The original Claude.ai reverse proxy\n- [ClewdR](https://github.com/Xerxes-2/clewdr) - High-performance Claude.ai reverse proxy\n- [FastAPI](https://fastapi.tiangolo.com/) - Modern, fast web framework\n- [Tailwind CSS](https://tailwindcss.com/) - CSS framework\n- [Shadcn UI](https://ui.shadcn.com/) - Modern UI component library\n- [Vite](https://vitejs.dev/) - Modern frontend build tool\n- [React](https://reactjs.org/) - JavaScript library\n\n## ⚠️ Disclaimer\n\nThis project is for learning and research purposes only. When using this project, please comply with the terms of service of the relevant services. The author is not responsible for any misuse or violations of service terms.\n\n## 📮 Contact\n\nIf you have questions or suggestions, feel free to reach out:\n\n- Submit an [Issue](https://github.com/mirrorange/clove/issues)\n- Send a Pull Request\n- Email: orange@freesia.ink\n\n## 🌸 About Clove\n\nClove is a plant from the Myrtaceae family's Syzygium genus, commonly used as a spice and in traditional medicine. Clove (丁香, the spice) and lilac flowers (丁香花, Syringa) are two different plants! In this project, the name Clove is actually a blend of \"Claude\" and \"love\"!\n\n---\n\n<div align=\"center\">\nMade with ❤️ by 🍊\n</div>\n"
  },
  {
    "path": "app/__init__.py",
    "content": "__version__ = \"0.1.0\"\n"
  },
  {
    "path": "app/api/__init__.py",
    "content": ""
  },
  {
    "path": "app/api/main.py",
    "content": "from fastapi import APIRouter\nfrom app.api.routes import claude, accounts, settings, statistics\n\napi_router = APIRouter()\n\napi_router.include_router(claude.router, prefix=\"/v1\", tags=[\"Claude API\"])\napi_router.include_router(\n    accounts.router, prefix=\"/api/admin/accounts\", tags=[\"Account Management\"]\n)\napi_router.include_router(\n    settings.router, prefix=\"/api/admin/settings\", tags=[\"Settings Management\"]\n)\napi_router.include_router(\n    statistics.router, prefix=\"/api/admin/statistics\", tags=[\"Statistics\"]\n)\n"
  },
  {
    "path": "app/api/routes/accounts.py",
    "content": "from typing import List, Optional\nfrom fastapi import APIRouter, HTTPException\nfrom pydantic import BaseModel, Field\nfrom uuid import UUID\nimport time\n\nfrom app.core.exceptions import OAuthExchangeError\nfrom app.dependencies.auth import AdminAuthDep\nfrom app.services.account import account_manager\nfrom app.core.account import AuthType, AccountStatus, OAuthToken\nfrom app.services.oauth import oauth_authenticator\n\n\nclass OAuthTokenCreate(BaseModel):\n    access_token: str\n    refresh_token: str\n    expires_at: float\n\n\nclass AccountCreate(BaseModel):\n    cookie_value: Optional[str] = None\n    oauth_token: Optional[OAuthTokenCreate] = None\n    organization_uuid: Optional[UUID] = None\n    capabilities: Optional[List[str]] = None\n\n\nclass AccountUpdate(BaseModel):\n    cookie_value: Optional[str] = None\n    oauth_token: Optional[OAuthTokenCreate] = None\n    capabilities: Optional[List[str]] = None\n    status: Optional[AccountStatus] = None\n\n\nclass OAuthCodeExchange(BaseModel):\n    organization_uuid: UUID\n    code: str\n    pkce_verifier: str\n    capabilities: Optional[List[str]] = None\n\n\nclass AccountResponse(BaseModel):\n    organization_uuid: str\n    capabilities: Optional[List[str]]\n    cookie_value: Optional[str] = Field(None, description=\"Masked cookie value\")\n    status: AccountStatus\n    auth_type: AuthType\n    is_pro: bool\n    is_max: bool\n    has_oauth: bool\n    last_used: str\n    resets_at: Optional[str] = None\n\n\nrouter = APIRouter()\n\n\n@router.get(\"\", response_model=List[AccountResponse])\nasync def list_accounts(_: AdminAuthDep):\n    \"\"\"List all accounts.\"\"\"\n    accounts = []\n\n    for org_uuid, account in account_manager._accounts.items():\n        accounts.append(\n            AccountResponse(\n                organization_uuid=org_uuid,\n                capabilities=account.capabilities,\n                cookie_value=account.cookie_value[:20] + \"...\"\n                if 
account.cookie_value\n                else None,\n                status=account.status,\n                auth_type=account.auth_type,\n                is_pro=account.is_pro,\n                is_max=account.is_max,\n                has_oauth=account.oauth_token is not None,\n                last_used=account.last_used.isoformat(),\n                resets_at=account.resets_at.isoformat() if account.resets_at else None,\n            )\n        )\n\n    return accounts\n\n\n@router.get(\"/{organization_uuid}\", response_model=AccountResponse)\nasync def get_account(organization_uuid: str, _: AdminAuthDep):\n    \"\"\"Get a specific account by organization UUID.\"\"\"\n    if organization_uuid not in account_manager._accounts:\n        raise HTTPException(status_code=404, detail=\"Account not found\")\n\n    account = account_manager._accounts[organization_uuid]\n\n    return AccountResponse(\n        organization_uuid=organization_uuid,\n        capabilities=account.capabilities,\n        cookie_value=account.cookie_value[:20] + \"...\"\n        if account.cookie_value\n        else None,\n        status=account.status,\n        auth_type=account.auth_type,\n        is_pro=account.is_pro,\n        is_max=account.is_max,\n        has_oauth=account.oauth_token is not None,\n        last_used=account.last_used.isoformat(),\n        resets_at=account.resets_at.isoformat() if account.resets_at else None,\n    )\n\n\n@router.post(\"\", response_model=AccountResponse)\nasync def create_account(account_data: AccountCreate, _: AdminAuthDep):\n    \"\"\"Create a new account.\"\"\"\n    oauth_token = None\n    if account_data.oauth_token:\n        oauth_token = OAuthToken(\n            access_token=account_data.oauth_token.access_token,\n            refresh_token=account_data.oauth_token.refresh_token,\n            expires_at=account_data.oauth_token.expires_at,\n        )\n\n    account = await account_manager.add_account(\n        cookie_value=account_data.cookie_value,\n      
  oauth_token=oauth_token,\n        organization_uuid=str(account_data.organization_uuid),\n        capabilities=account_data.capabilities,\n    )\n\n    return AccountResponse(\n        organization_uuid=account.organization_uuid,\n        capabilities=account.capabilities,\n        cookie_value=account.cookie_value[:20] + \"...\"\n        if account.cookie_value\n        else None,\n        status=account.status,\n        auth_type=account.auth_type,\n        is_pro=account.is_pro,\n        is_max=account.is_max,\n        has_oauth=account.oauth_token is not None,\n        last_used=account.last_used.isoformat(),\n        resets_at=account.resets_at.isoformat() if account.resets_at else None,\n    )\n\n\n@router.put(\"/{organization_uuid}\", response_model=AccountResponse)\nasync def update_account(\n    organization_uuid: str, account_data: AccountUpdate, _: AdminAuthDep\n):\n    \"\"\"Update an existing account.\"\"\"\n    if organization_uuid not in account_manager._accounts:\n        raise HTTPException(status_code=404, detail=\"Account not found\")\n\n    account = account_manager._accounts[organization_uuid]\n\n    # Update fields if provided\n    if account_data.cookie_value is not None:\n        # Remove old cookie mapping if exists\n        if (\n            account.cookie_value\n            and account.cookie_value in account_manager._cookie_to_uuid\n        ):\n            del account_manager._cookie_to_uuid[account.cookie_value]\n\n        account.cookie_value = account_data.cookie_value\n        account_manager._cookie_to_uuid[account_data.cookie_value] = organization_uuid\n\n    if account_data.oauth_token is not None:\n        account.oauth_token = OAuthToken(\n            access_token=account_data.oauth_token.access_token,\n            refresh_token=account_data.oauth_token.refresh_token,\n            expires_at=account_data.oauth_token.expires_at,\n        )\n        # Update auth type based on what's available\n        if account.cookie_value 
and account.oauth_token:\n            account.auth_type = AuthType.BOTH\n        elif account.oauth_token:\n            account.auth_type = AuthType.OAUTH_ONLY\n        else:\n            account.auth_type = AuthType.COOKIE_ONLY\n\n    if account_data.capabilities is not None:\n        account.capabilities = account_data.capabilities\n\n    if account_data.status is not None:\n        account.status = account_data.status\n        if account.status == AccountStatus.VALID:\n            account.resets_at = None\n\n    # Save changes\n    account_manager.save_accounts()\n\n    return AccountResponse(\n        organization_uuid=organization_uuid,\n        capabilities=account.capabilities,\n        cookie_value=account.cookie_value[:20] + \"...\"\n        if account.cookie_value\n        else None,\n        status=account.status,\n        auth_type=account.auth_type,\n        is_pro=account.is_pro,\n        is_max=account.is_max,\n        has_oauth=account.oauth_token is not None,\n        last_used=account.last_used.isoformat(),\n        resets_at=account.resets_at.isoformat() if account.resets_at else None,\n    )\n\n\n@router.delete(\"/{organization_uuid}\")\nasync def delete_account(organization_uuid: str, _: AdminAuthDep):\n    \"\"\"Delete an account.\"\"\"\n    if organization_uuid not in account_manager._accounts:\n        raise HTTPException(status_code=404, detail=\"Account not found\")\n\n    await account_manager.remove_account(organization_uuid)\n\n    return {\"message\": \"Account deleted successfully\"}\n\n\n@router.post(\"/oauth/exchange\", response_model=AccountResponse)\nasync def exchange_oauth_code(exchange_data: OAuthCodeExchange, _: AdminAuthDep):\n    \"\"\"Exchange OAuth authorization code for tokens and create account.\"\"\"\n    # Exchange code for tokens\n    token_data = await oauth_authenticator.exchange_token(\n        exchange_data.code, exchange_data.pkce_verifier\n    )\n\n    if not token_data:\n        raise OAuthExchangeError()\n\n   
 # Create OAuth token object\n    oauth_token = OAuthToken(\n        access_token=token_data[\"access_token\"],\n        refresh_token=token_data[\"refresh_token\"],\n        expires_at=time.time() + token_data[\"expires_in\"],\n    )\n\n    # Create account with OAuth token\n    account = await account_manager.add_account(\n        oauth_token=oauth_token,\n        organization_uuid=str(exchange_data.organization_uuid),\n        capabilities=exchange_data.capabilities,\n    )\n\n    return AccountResponse(\n        organization_uuid=account.organization_uuid,\n        capabilities=account.capabilities,\n        cookie_value=None,\n        status=account.status,\n        auth_type=account.auth_type,\n        is_pro=account.is_pro,\n        is_max=account.is_max,\n        has_oauth=True,\n        last_used=account.last_used.isoformat(),\n        resets_at=account.resets_at.isoformat() if account.resets_at else None,\n    )\n"
  },
  {
    "path": "app/api/routes/claude.py",
    "content": "from fastapi import APIRouter, Request\nfrom fastapi.responses import StreamingResponse, JSONResponse\nfrom tenacity import (\n    retry,\n    retry_if_exception,\n    stop_after_attempt,\n    wait_fixed,\n)\n\nfrom app.core.config import settings\nfrom app.core.exceptions import NoResponseError\nfrom app.dependencies.auth import AuthDep\nfrom app.models.claude import MessagesAPIRequest\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.processors.claude_ai.pipeline import ClaudeAIPipeline\nfrom app.utils.retry import is_retryable_error, log_before_sleep\n\nrouter = APIRouter()\n\n\n@router.post(\"/messages\", response_model=None)\n@retry(\n    retry=retry_if_exception(is_retryable_error),\n    stop=stop_after_attempt(settings.retry_attempts),\n    wait=wait_fixed(settings.retry_interval),\n    before_sleep=log_before_sleep,\n    reraise=True,\n)\nasync def create_message(\n    request: Request, messages_request: MessagesAPIRequest, _: AuthDep\n) -> StreamingResponse | JSONResponse:\n    context = ClaudeAIContext(\n        original_request=request,\n        messages_api_request=messages_request,\n    )\n\n    context = await ClaudeAIPipeline().process(context)\n\n    if not context.response:\n        raise NoResponseError()\n\n    return context.response\n"
  },
  {
    "path": "app/api/routes/settings.py",
    "content": "import os\nimport json\nfrom typing import List\nfrom fastapi import APIRouter, HTTPException\nfrom pydantic import BaseModel, HttpUrl\n\nfrom app.dependencies.auth import AdminAuthDep\nfrom app.core.config import Settings, settings\n\n\nclass SettingsRead(BaseModel):\n    \"\"\"Model for returning settings.\"\"\"\n\n    api_keys: List[str]\n    admin_api_keys: List[str]\n\n    proxy_url: str | None\n\n    claude_ai_url: HttpUrl\n    claude_api_baseurl: HttpUrl\n\n    custom_prompt: str | None\n    use_real_roles: bool\n    human_name: str\n    assistant_name: str\n    padtxt_length: int\n    allow_external_images: bool\n\n    preserve_chats: bool\n\n    oauth_client_id: str\n    oauth_authorize_url: str\n    oauth_token_url: str\n    oauth_redirect_uri: str\n\n\nclass SettingsUpdate(BaseModel):\n    \"\"\"Model for updating settings.\"\"\"\n\n    api_keys: List[str] | None = None\n    admin_api_keys: List[str] | None = None\n\n    proxy_url: str | None = None\n\n    claude_ai_url: HttpUrl | None = None\n    claude_api_baseurl: HttpUrl | None = None\n\n    custom_prompt: str | None = None\n    use_real_roles: bool | None = None\n    human_name: str | None = None\n    assistant_name: str | None = None\n    padtxt_length: int | None = None\n    allow_external_images: bool | None = None\n\n    preserve_chats: bool | None = None\n\n    oauth_client_id: str | None = None\n    oauth_authorize_url: str | None = None\n    oauth_token_url: str | None = None\n    oauth_redirect_uri: str | None = None\n\n\nrouter = APIRouter()\n\n\n@router.get(\"\", response_model=SettingsRead)\nasync def get_settings(_: AdminAuthDep) -> Settings:\n    \"\"\"Get current settings.\"\"\"\n    return settings\n\n\n@router.put(\"\", response_model=SettingsUpdate)\nasync def update_settings(_: AdminAuthDep, updates: SettingsUpdate) -> Settings:\n    \"\"\"Update settings and save to config.json.\"\"\"\n    update_dict = updates.model_dump(exclude_unset=True)\n\n    if not 
settings.no_filesystem_mode:\n        config_path = settings.data_folder / \"config.json\"\n        settings.data_folder.mkdir(parents=True, exist_ok=True)\n\n        if os.path.exists(config_path):\n            try:\n                with open(config_path, \"r\", encoding=\"utf-8\") as f:\n                    config_data = SettingsUpdate.model_validate_json(f.read())\n            except (ValueError, IOError):\n                config_data = SettingsUpdate()\n        else:\n            config_data = SettingsUpdate()\n\n        config_data = config_data.model_copy(update=update_dict)\n\n        try:\n            with open(config_path, \"w\", encoding=\"utf-8\") as f:\n                f.write(config_data.model_dump_json(exclude_unset=True))\n        except IOError as e:\n            raise HTTPException(\n                status_code=500, detail=f\"Failed to save config: {str(e)}\"\n            )\n\n    for key, value in update_dict.items():\n        if hasattr(settings, key):\n            setattr(settings, key, value)\n\n    return settings\n"
  },
  {
    "path": "app/api/routes/statistics.py",
    "content": "from typing import Literal\nfrom fastapi import APIRouter\nfrom pydantic import BaseModel\n\nfrom app.dependencies.auth import AdminAuthDep\nfrom app.services.account import account_manager\n\n\nclass AccountStats(BaseModel):\n    total_accounts: int\n    valid_accounts: int\n    rate_limited_accounts: int\n    invalid_accounts: int\n    active_sessions: int\n\n\nclass StatisticsResponse(BaseModel):\n    status: Literal[\"healthy\", \"degraded\"]\n    accounts: AccountStats\n\n\nrouter = APIRouter()\n\n\n@router.get(\"\", response_model=StatisticsResponse)\nasync def get_statistics(_: AdminAuthDep):\n    \"\"\"Get system statistics. Requires admin authentication.\"\"\"\n    stats = await account_manager.get_status()\n    return {\n        \"status\": \"healthy\" if stats[\"valid_accounts\"] > 0 else \"degraded\",\n        \"accounts\": stats,\n    }\n"
  },
  {
    "path": "app/core/__init__.py",
    "content": ""
  },
  {
    "path": "app/core/account.py",
    "content": "from typing import List, Optional\nfrom enum import Enum\nfrom datetime import datetime\nfrom dataclasses import dataclass\n\nfrom app.core.exceptions import (\n    ClaudeAuthenticationError,\n    ClaudeRateLimitedError,\n    OAuthAuthenticationNotAllowedError,\n    OrganizationDisabledError,\n)\n\n\nclass AccountStatus(str, Enum):\n    VALID = \"valid\"\n    INVALID = \"invalid\"\n    RATE_LIMITED = \"rate_limited\"\n\n\nclass AuthType(str, Enum):\n    COOKIE_ONLY = \"cookie_only\"\n    OAUTH_ONLY = \"oauth_only\"\n    BOTH = \"both\"\n\n\n@dataclass\nclass OAuthToken:\n    \"\"\"Encapsulates OAuth credentials for an account.\"\"\"\n\n    access_token: str\n    refresh_token: str\n    expires_at: float  # Unix timestamp\n\n    def to_dict(self) -> dict:\n        \"\"\"Convert to dictionary for JSON serialization.\"\"\"\n        return {\n            \"access_token\": self.access_token,\n            \"refresh_token\": self.refresh_token,\n            \"expires_at\": self.expires_at,\n        }\n\n    @classmethod\n    def from_dict(cls, data: dict) -> \"OAuthToken\":\n        \"\"\"Create from dictionary.\"\"\"\n        return cls(\n            access_token=data[\"access_token\"],\n            refresh_token=data[\"refresh_token\"],\n            expires_at=data[\"expires_at\"],\n        )\n\n\nclass Account:\n    \"\"\"Represents a Claude.ai account with cookie and/or OAuth authentication.\"\"\"\n\n    def __init__(\n        self,\n        organization_uuid: str,\n        capabilities: Optional[List[str]] = None,\n        cookie_value: Optional[str] = None,\n        oauth_token: Optional[OAuthToken] = None,\n        auth_type: AuthType = AuthType.COOKIE_ONLY,\n    ):\n        self.organization_uuid = organization_uuid\n        self.capabilities = capabilities\n        self.cookie_value = cookie_value\n        self.status = AccountStatus.VALID\n        self.auth_type = auth_type\n        self.last_used = datetime.now()\n        self.resets_at: 
Optional[datetime] = None\n        self.oauth_token: Optional[OAuthToken] = oauth_token\n\n    def __enter__(self) -> \"Account\":\n        \"\"\"Enter the context manager.\"\"\"\n        self.last_used = datetime.now()\n        return self\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        \"\"\"Exit the context manager, updating account status for known Claude errors.\"\"\"\n        if exc_type is ClaudeRateLimitedError and isinstance(\n            exc_val, ClaudeRateLimitedError\n        ):\n            self.status = AccountStatus.RATE_LIMITED\n            self.resets_at = exc_val.resets_at\n            self.save()\n\n        if exc_type is ClaudeAuthenticationError and isinstance(\n            exc_val, ClaudeAuthenticationError\n        ):\n            self.status = AccountStatus.INVALID\n            self.save()\n\n        if exc_type is OrganizationDisabledError and isinstance(\n            exc_val, OrganizationDisabledError\n        ):\n            self.status = AccountStatus.INVALID\n            self.save()\n\n        if exc_type is OAuthAuthenticationNotAllowedError and isinstance(\n            exc_val, OAuthAuthenticationNotAllowedError\n        ):\n            if self.auth_type == AuthType.BOTH:\n                self.auth_type = AuthType.COOKIE_ONLY\n            else:\n                self.status = AccountStatus.INVALID\n            self.save()\n\n        return False\n\n    def save(self) -> None:\n        from app.services.account import account_manager\n\n        account_manager.save_accounts()\n\n    def to_dict(self) -> dict:\n        \"\"\"Convert Account to dictionary for JSON serialization.\"\"\"\n        return {\n            \"organization_uuid\": self.organization_uuid,\n            \"capabilities\": self.capabilities,\n            \"cookie_value\": self.cookie_value,\n            \"status\": self.status.value,\n            \"auth_type\": self.auth_type.value,\n            \"last_used\": self.last_used.isoformat(),\n            \"resets_at\": 
self.resets_at.isoformat() if self.resets_at else None,\n            \"oauth_token\": self.oauth_token.to_dict() if self.oauth_token else None,\n        }\n\n    @classmethod\n    def from_dict(cls, data: dict) -> \"Account\":\n        \"\"\"Create Account from dictionary.\"\"\"\n        account = cls(\n            organization_uuid=data[\"organization_uuid\"],\n            capabilities=data.get(\"capabilities\"),\n            cookie_value=data.get(\"cookie_value\"),\n            auth_type=AuthType(data[\"auth_type\"]),\n        )\n        account.status = AccountStatus(data[\"status\"])\n        account.last_used = datetime.fromisoformat(data[\"last_used\"])\n        account.resets_at = (\n            datetime.fromisoformat(data[\"resets_at\"]) if data[\"resets_at\"] else None\n        )\n\n        if \"oauth_token\" in data and data[\"oauth_token\"]:\n            account.oauth_token = OAuthToken.from_dict(data[\"oauth_token\"])\n\n        return account\n\n    @property\n    def is_pro(self) -> bool:\n        \"\"\"Check if account has pro capabilities.\"\"\"\n        if not self.capabilities:\n            return False\n\n        pro_keywords = [\"pro\", \"enterprise\", \"raven\", \"max\"]\n        return any(\n            keyword in cap.lower()\n            for cap in self.capabilities\n            for keyword in pro_keywords\n        )\n\n    @property\n    def is_max(self) -> bool:\n        \"\"\"Check if account has max capabilities.\"\"\"\n        if not self.capabilities:\n            return False\n\n        return any(\"max\" in cap.lower() for cap in self.capabilities)\n\n    def __repr__(self) -> str:\n        \"\"\"String representation of the Account.\"\"\"\n        return f\"<Account organization_uuid={self.organization_uuid[:8]}... status={self.status.value} auth_type={self.auth_type.value}>\"\n"
  },
  {
    "path": "app/core/claude_session.py",
    "content": "from typing import Dict, Any, AsyncIterator, Optional\nfrom datetime import datetime\nfrom app.core.http_client import Response\nfrom loguru import logger\n\nfrom app.core.config import settings\nfrom app.core.external.claude_client import ClaudeWebClient\nfrom app.services.account import account_manager\n\n\nclass ClaudeWebSession:\n    def __init__(self, session_id: str):\n        self.session_id = session_id\n        self.last_activity = datetime.now()\n        self.conv_uuid: Optional[str] = None\n        self.paprika_mode: Optional[str] = None\n        self.sse_stream: Optional[AsyncIterator[str]] = None\n\n    async def initialize(self):\n        \"\"\"Initialize the session.\"\"\"\n        self.account = await account_manager.get_account_for_session(self.session_id)\n        self.client = ClaudeWebClient(self.account)\n        await self.client.initialize()\n\n    async def stream(self, response: Response) -> AsyncIterator[str]:\n        \"\"\"Get the SSE stream.\"\"\"\n        buffer = b\"\"\n        async for chunk in response.aiter_bytes():\n            self.update_activity()\n            buffer += chunk\n            lines = buffer.split(b\"\\n\")\n            buffer = lines[-1]\n            for line in lines[:-1]:\n                yield line.decode(\"utf-8\") + \"\\n\"\n\n        if buffer:\n            yield buffer.decode(\"utf-8\")\n\n        logger.debug(f\"Stream completed for session {self.session_id}\")\n\n        from app.services.session import session_manager\n\n        await session_manager.remove_session(self.session_id)\n\n    async def cleanup(self):\n        \"\"\"Cleanup session resources.\"\"\"\n        logger.debug(f\"Cleaning up session {self.session_id}\")\n\n        # Delete conversation if exists\n        if self.conv_uuid and not settings.preserve_chats:\n            await self.client.delete_conversation(self.conv_uuid)\n\n        await account_manager.release_session(self.session_id)\n        await 
self.client.cleanup()\n\n    async def _ensure_conversation_initialized(self) -> None:\n        \"\"\"Ensure conversation is initialized. Create if not exists.\"\"\"\n        if not self.conv_uuid:\n            conv_uuid, paprika_mode = await self.client.create_conversation()\n            self.conv_uuid = conv_uuid\n            self.paprika_mode = paprika_mode\n\n    def update_activity(self):\n        \"\"\"Update last activity timestamp.\"\"\"\n        self.last_activity = datetime.now()\n\n    async def send_message(self, payload: Dict[str, Any]) -> AsyncIterator[str]:\n        \"\"\"Process a completion request through the pipeline.\"\"\"\n        self.update_activity()\n\n        await self._ensure_conversation_initialized()\n\n        response = await self.client.send_message(\n            payload,\n            conv_uuid=self.conv_uuid,\n        )\n        self.sse_stream = self.stream(response)\n\n        logger.debug(f\"Sent message for session {self.session_id}\")\n        return self.sse_stream\n\n    async def upload_file(\n        self, file_data: bytes, filename: str, content_type: str\n    ) -> str:\n        \"\"\"Upload a file and return file UUID.\"\"\"\n        return await self.client.upload_file(file_data, filename, content_type)\n\n    async def send_tool_result(self, payload: Dict[str, Any]) -> None:\n        \"\"\"Send tool result to Claude.ai.\"\"\"\n        if not self.conv_uuid:\n            raise ValueError(\n                \"Session must have an active conversation to send tool results\"\n            )\n\n        await self.client.send_tool_result(payload, self.conv_uuid)\n\n    async def set_paprika_mode(self, mode: Optional[str]) -> None:\n        \"\"\"Set the conversation mode.\"\"\"\n        await self._ensure_conversation_initialized()\n\n        if self.paprika_mode == mode:\n            return\n\n        await self.client.set_paprika_mode(self.conv_uuid, mode)\n        self.paprika_mode = mode\n"
  },
  {
    "path": "app/core/config.py",
    "content": "import os\nimport json\nfrom pathlib import Path\nfrom typing import Optional, List, Dict, Any\nfrom pydantic_settings import BaseSettings, SettingsConfigDict\nfrom pydantic import Field, HttpUrl, field_validator\nfrom dotenv import load_dotenv\n\nclass Settings(BaseSettings):\n    \"\"\"Application settings with environment variable and JSON config support.\"\"\"\n\n    model_config = SettingsConfigDict(\n        env_file=\".env\",\n        env_ignore_empty=True,\n        extra=\"ignore\",\n    )\n\n    @classmethod\n    def settings_customise_sources(\n        cls,\n        settings_cls,\n        init_settings,\n        env_settings,\n        dotenv_settings,\n        file_secret_settings,\n    ):\n        \"\"\"Customize settings sources to add JSON config support.\n\n        Priority order (highest to lowest):\n        1. JSON config file\n        2. Environment variables\n        3. .env file\n        4. Default values\n        \"\"\"\n        return (\n            init_settings,\n            cls._json_config_settings,\n            env_settings,\n            dotenv_settings,\n            file_secret_settings,\n        )\n\n    @classmethod\n    def _json_config_settings(cls) -> Dict[str, Any]:\n        \"\"\"Load settings from JSON config file in data_folder.\"\"\"\n\n        # Check if NO_FILESYSTEM_MODE is enabled\n        if os.environ.get(\"NO_FILESYSTEM_MODE\", \"\").lower() in (\"true\", \"1\", \"yes\"):\n            return {}\n\n        # Load .env file to ensure environment variables are available\n        load_dotenv()\n\n        # First get data_folder from env or default\n        data_folder = os.environ.get(\n            \"DATA_FOLDER\", str(Path.home() / \".clove\" / \"data\")\n        )\n\n        config_path = os.path.join(data_folder, \"config.json\")\n\n        if os.path.exists(config_path):\n            try:\n                with open(config_path, \"r\", encoding=\"utf-8\") as f:\n                    config_data = 
json.load(f)\n                    return config_data\n            except (json.JSONDecodeError, IOError):\n                # If there's an error reading the JSON, just return empty dict\n                return {}\n        return {}\n\n    # Server settings\n    host: str = Field(default=\"0.0.0.0\", env=\"HOST\")\n    port: int = Field(default=5201, env=\"PORT\")\n\n    # Application configuration\n    data_folder: Path = Field(\n        default=Path.home() / \".clove\" / \"data\",\n        env=\"DATA_FOLDER\",\n        description=\"Folder path for storing persistent data (accounts, etc.)\",\n    )\n    locales_folder: Path = Field(\n        default=Path(__file__).parent.parent / \"locales\",\n        env=\"LOCALES_FOLDER\",\n        description=\"Folder path for storing translation files\",\n    )\n    static_folder: Path = Field(\n        default=Path(__file__).parent.parent / \"static\",\n        env=\"STATIC_FOLDER\",\n        description=\"Folder path for storing static files\",\n    )\n    default_language: str = Field(\n        default=\"en\",\n        env=\"DEFAULT_LANGUAGE\",\n        description=\"Default language code for translations\",\n    )\n    retry_attempts: int = Field(\n        default=3,\n        env=\"RETRY_ATTEMPTS\",\n        description=\"Number of retry attempts for failed requests\",\n    )\n    retry_interval: int = Field(\n        default=1,\n        env=\"RETRY_INTERVAL\",\n        description=\"Interval between retry attempts in seconds\",\n    )\n    no_filesystem_mode: bool = Field(\n        default=False,\n        env=\"NO_FILESYSTEM_MODE\",\n        description=\"When True, disables all filesystem operations (accounts/settings stored in memory only)\",\n    )\n\n    # Proxy settings\n    proxy_url: Optional[str] = Field(default=None, env=\"PROXY_URL\")\n\n    # API Keys\n    api_keys: List[str] | str = Field(\n        default_factory=list,\n        env=\"API_KEYS\",\n        description=\"Comma-separated list of API keys\",\n    
)\n    admin_api_keys: List[str] | str = Field(\n        default_factory=list,\n        env=\"ADMIN_API_KEYS\",\n        description=\"Comma-separated list of admin API keys\",\n    )\n\n    # Claude URLs\n    claude_ai_url: HttpUrl = Field(default=\"https://claude.ai\", env=\"CLAUDE_AI_URL\")\n    claude_api_baseurl: HttpUrl = Field(\n        default=\"https://api.anthropic.com\", env=\"CLAUDE_API_BASEURL\"\n    )\n\n    # Cookies\n    cookies: List[str] | str = Field(\n        default_factory=list,\n        env=\"COOKIES\",\n        description=\"Comma-separated list of Claude.ai cookies\",\n    )\n\n    # Content processing\n    custom_prompt: Optional[str] = Field(default=None, env=\"CUSTOM_PROMPT\")\n    use_real_roles: bool = Field(default=True, env=\"USE_REAL_ROLES\")\n    human_name: str = Field(default=\"Human\", env=\"CUSTOM_HUMAN_NAME\")\n    assistant_name: str = Field(default=\"Assistant\", env=\"CUSTOM_ASSISTANT_NAME\")\n    pad_tokens: List[str] | str = Field(default_factory=list, env=\"PAD_TOKENS\")\n    padtxt_length: int = Field(default=0, env=\"PADTXT_LENGTH\")\n    allow_external_images: bool = Field(\n        default=False,\n        env=\"ALLOW_EXTERNAL_IMAGES\",\n        description=\"Allow downloading images from external URLs\",\n    )\n\n    # Request settings\n    request_timeout: int = Field(default=60, env=\"REQUEST_TIMEOUT\")\n    request_retries: int = Field(default=3, env=\"REQUEST_RETRIES\")\n    request_retry_interval: int = Field(default=1, env=\"REQUEST_RETRY_INTERVAL\")\n\n    # Feature flags\n    preserve_chats: bool = Field(default=False, env=\"PRESERVE_CHATS\")\n\n    # Logging\n    log_level: str = Field(default=\"INFO\", env=\"LOG_LEVEL\")\n    log_to_file: bool = Field(\n        default=False, env=\"LOG_TO_FILE\", description=\"Enable logging to file\"\n    )\n    log_file_path: str = Field(\n        default=\"logs/app.log\", env=\"LOG_FILE_PATH\", description=\"Log file path\"\n    )\n    log_file_rotation: str = Field(\n  
      default=\"10 MB\",\n        env=\"LOG_FILE_ROTATION\",\n        description=\"Log file rotation (e.g., '10 MB', '1 day', '1 week')\",\n    )\n    log_file_retention: str = Field(\n        default=\"7 days\",\n        env=\"LOG_FILE_RETENTION\",\n        description=\"Log file retention (e.g., '7 days', '1 month')\",\n    )\n    log_file_compression: str = Field(\n        default=\"zip\",\n        env=\"LOG_FILE_COMPRESSION\",\n        description=\"Log file compression format\",\n    )\n\n    # Session management settings\n    session_timeout: int = Field(\n        default=300,\n        env=\"SESSION_TIMEOUT\",\n        description=\"Session idle timeout in seconds\",\n    )\n    session_cleanup_interval: int = Field(\n        default=30,\n        env=\"SESSION_CLEANUP_INTERVAL\",\n        description=\"Interval for cleaning up expired sessions in seconds\",\n    )\n    max_sessions_per_cookie: int = Field(\n        default=3,\n        env=\"MAX_SESSIONS_PER_COOKIE\",\n        description=\"Maximum number of concurrent sessions per cookie\",\n    )\n\n    # Account management settings\n    account_task_interval: int = Field(\n        default=60,\n        env=\"ACCOUNT_TASK_INTERVAL\",\n        description=\"Interval for account management task in seconds\",\n    )\n\n    # Tool call settings\n    tool_call_timeout: int = Field(\n        default=300,\n        env=\"TOOL_CALL_TIMEOUT\",\n        description=\"Timeout for pending tool calls in seconds\",\n    )\n    tool_call_cleanup_interval: int = Field(\n        default=60,\n        env=\"TOOL_CALL_CLEANUP_INTERVAL\",\n        description=\"Interval for cleaning up expired tool calls in seconds\",\n    )\n\n    # Cache settings\n    cache_timeout: int = Field(\n        default=300,\n        env=\"CACHE_TIMEOUT\",\n        description=\"Timeout for cache checkpoints in seconds (default: 5 minutes)\",\n    )\n    cache_cleanup_interval: int = Field(\n        default=60,\n        
env=\"CACHE_CLEANUP_INTERVAL\",\n        description=\"Interval for cleaning up expired cache checkpoints in seconds\",\n    )\n\n    # Claude OAuth settings\n    oauth_client_id: str = Field(\n        default=\"9d1c250a-e61b-44d9-88ed-5944d1962f5e\",\n        env=\"OAUTH_CLIENT_ID\",\n        description=\"OAuth client ID for Claude authentication\",\n    )\n    oauth_authorize_url: str = Field(\n        default=\"https://claude.ai/v1/oauth/{organization_uuid}/authorize\",\n        env=\"OAUTH_AUTHORIZE_URL\",\n        description=\"OAuth authorization endpoint URL template\",\n    )\n    oauth_token_url: str = Field(\n        default=\"https://console.anthropic.com/v1/oauth/token\",\n        env=\"OAUTH_TOKEN_URL\",\n        description=\"OAuth token exchange endpoint URL\",\n    )\n    oauth_redirect_uri: str = Field(\n        default=\"https://console.anthropic.com/oauth/code/callback\",\n        env=\"OAUTH_REDIRECT_URI\",\n        description=\"OAuth redirect URI for authorization flow\",\n    )\n\n    # Claude API Specific\n    max_models: List[str] | str = Field(\n        default=[],\n        env=\"MAX_MODELS\",\n        description=\"Comma-separated list of models that require max plan accounts\",\n    )\n\n    @field_validator(\n        \"api_keys\", \"admin_api_keys\", \"cookies\", \"max_models\", \"pad_tokens\"\n    )\n    def parse_comma_separated(cls, v: str | List[str]) -> List[str]:\n        \"\"\"Parse comma-separated string.\"\"\"\n        if isinstance(v, str):\n            return [key.strip() for key in v.split(\",\") if key.strip()]\n        return v\n\n\nsettings = Settings()\n"
  },
  {
    "path": "app/core/error_handler.py",
    "content": "from typing import Dict, Any\nfrom fastapi import Request\nfrom fastapi.responses import JSONResponse\nfrom loguru import logger\n\nfrom app.services.i18n import i18n_service\nfrom app.core.exceptions import AppError\n\n\nclass ErrorHandler:\n    \"\"\"\n    Centralized error handler for the application. Handles AppException.\n    \"\"\"\n\n    @staticmethod\n    def get_language_from_request(request: Request) -> str:\n        \"\"\"Extract language preference from request headers.\"\"\"\n        accept_language = request.headers.get(\"accept-language\")\n        return i18n_service.parse_accept_language(accept_language)\n\n    @staticmethod\n    def format_error_response(\n        error_code: int, message: str, context: Dict[str, Any] = None\n    ) -> Dict[str, Any]:\n        \"\"\"\n        Format error response in standardized format.\n\n        Args:\n            error_code: 6-digit error code\n            message: Localized error message\n            context: Additional context information\n\n        Returns:\n            Formatted error response\n        \"\"\"\n        response = {\"detail\": {\"code\": error_code, \"message\": message}}\n\n        # Add context if provided and not empty\n        if context:\n            response[\"detail\"][\"context\"] = context\n\n        return response\n\n    @staticmethod\n    async def handle_app_exception(request: Request, exc: AppError) -> JSONResponse:\n        \"\"\"\n        Handle AppException instances.\n\n        Args:\n            request: The FastAPI request object\n            exc: The AppException instance\n\n        Returns:\n            JSONResponse with localized error message\n        \"\"\"\n        language = ErrorHandler.get_language_from_request(request)\n\n        # Get localized message\n        message = i18n_service.get_message(\n            message_key=exc.message_key, language=language, context=exc.context\n        )\n\n        # Format response\n        response_data = 
ErrorHandler.format_error_response(\n            error_code=exc.error_code,\n            message=message,\n            context=exc.context if exc.context else None,\n        )\n\n        # Log the error\n        logger.warning(\n            f\"AppException: {exc.__class__.__name__} - \"\n            f\"Code: {exc.error_code}, Message: {message}, \"\n            f\"Context: {exc.context}\"\n        )\n\n        return JSONResponse(status_code=exc.status_code, content=response_data)\n\n\n# Exception handler functions for FastAPI\nasync def app_exception_handler(request: Request, exc: AppError) -> JSONResponse:\n    \"\"\"FastAPI exception handler for AppError.\"\"\"\n    return await ErrorHandler.handle_app_exception(request, exc)\n"
  },
  {
    "path": "app/core/exceptions.py",
    "content": "from datetime import datetime\nfrom typing import Optional, Any, Dict\n\n\nclass AppError(Exception):\n    \"\"\"\n    Base class for application-specific exceptions.\n    \"\"\"\n\n    def __init__(\n        self,\n        error_code: int,\n        message_key: str,\n        status_code: int,\n        context: Optional[Dict[str, Any]] = None,\n        retryable: bool = False,\n    ):\n        self.error_code = error_code\n        self.message_key = message_key\n        self.status_code = status_code\n        self.context = context if context is not None else {}\n        self.retryable = retryable\n        super().__init__(\n            f\"Error Code: {error_code}, Message Key: {message_key}, Context: {self.context}\"\n        )\n\n    def __str__(self):\n        return f\"{self.__class__.__name__}(error_code={self.error_code}, message_key='{self.message_key}', status_code={self.status_code}, context={self.context})\"\n\n\nclass InternalServerError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=500000,\n            message_key=\"global.internalServerError\",\n            status_code=500,\n            context=context,\n        )\n\n\nclass NoAPIKeyProvidedError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=401010,\n            message_key=\"global.noAPIKeyProvided\",\n            status_code=401,\n            context=context,\n        )\n\n\nclass InvalidAPIKeyError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=401011,\n            message_key=\"global.invalidAPIKey\",\n            status_code=401,\n            context=context,\n        )\n\n\nclass NoAccountsAvailableError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            
error_code=503100,\n            message_key=\"accountManager.noAccountsAvailable\",\n            status_code=503,\n            context=context,\n            retryable=True,\n        )\n\n\nclass ClaudeRateLimitedError(AppError):\n    resets_at: datetime\n\n    def __init__(self, resets_at: datetime, context: Optional[Dict[str, Any]] = None):\n        self.resets_at = resets_at\n        _context = context.copy() if context else {}\n        _context[\"resets_at\"] = resets_at.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n        super().__init__(\n            error_code=429120,\n            message_key=\"claudeClient.claudeRateLimited\",\n            status_code=429,\n            context=_context,\n            retryable=True,\n        )\n\n\nclass CloudflareBlockedError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=503121,\n            message_key=\"claudeClient.cloudflareBlocked\",\n            status_code=503,\n            context=context,\n        )\n\n\nclass OrganizationDisabledError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=400122,\n            message_key=\"claudeClient.organizationDisabled\",\n            status_code=400,\n            context=context,\n            retryable=True,\n        )\n\n\nclass InvalidModelNameError(AppError):\n    def __init__(self, model_name: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context[\"model_name\"] = model_name\n        super().__init__(\n            error_code=400123,\n            message_key=\"claudeClient.invalidModelName\",\n            status_code=400,\n            context=_context,\n        )\n\n\nclass ClaudeAuthenticationError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=400124,\n            
message_key=\"claudeClient.authenticationError\",\n            status_code=400,\n            context=context,\n        )\n\n\nclass ClaudeHttpError(AppError):\n    def __init__(\n        self,\n        url,\n        status_code: int,\n        error_type: str,\n        error_message: Any,\n        context: Optional[Dict[str, Any]] = None,\n    ):\n        _context = context.copy() if context else {}\n        _context.update({\n            \"url\": url,\n            \"status_code\": status_code,\n            \"error_type\": error_type,\n            \"error_message\": error_message,\n        })\n        super().__init__(\n            error_code=503130,\n            message_key=\"claudeClient.httpError\",\n            status_code=status_code,\n            context=_context,\n            retryable=True,\n        )\n\n\nclass NoValidMessagesError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=400140,\n            message_key=\"messageProcessor.noValidMessages\",\n            status_code=400,\n            context=context,\n        )\n\n\nclass ExternalImageDownloadError(AppError):\n    def __init__(self, url: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context.update({\"url\": url})\n        super().__init__(\n            error_code=503141,\n            message_key=\"messageProcessor.externalImageDownloadError\",\n            status_code=503,\n            context=_context,\n        )\n\n\nclass ExternalImageNotAllowedError(AppError):\n    def __init__(self, url: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context.update({\"url\": url})\n        super().__init__(\n            error_code=400142,\n            message_key=\"messageProcessor.externalImageNotAllowed\",\n            status_code=400,\n            context=_context,\n        )\n\n\nclass 
NoResponseError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=503160,\n            message_key=\"pipeline.noResponse\",\n            status_code=503,\n            context=context,\n        )\n\n\nclass OAuthExchangeError(AppError):\n    def __init__(self, reason: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context[\"reason\"] = reason or \"Unknown\"\n        super().__init__(\n            error_code=400180,\n            message_key=\"oauthService.oauthExchangeError\",\n            status_code=400,\n            context=_context,\n        )\n\n\nclass OrganizationInfoError(AppError):\n    def __init__(self, reason: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context[\"reason\"] = reason or \"Unknown\"\n        super().__init__(\n            error_code=503181,\n            message_key=\"oauthService.organizationInfoError\",\n            status_code=503,\n            context=_context,\n        )\n\n\nclass CookieAuthorizationError(AppError):\n    def __init__(self, reason: str, context: Optional[Dict[str, Any]] = None):\n        _context = context.copy() if context else {}\n        _context[\"reason\"] = reason or \"Unknown\"\n        super().__init__(\n            error_code=400182,\n            message_key=\"oauthService.cookieAuthorizationError\",\n            status_code=400,\n            context=_context,\n        )\n\n\nclass OAuthAuthenticationNotAllowedError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=400183,\n            message_key=\"oauthService.oauthAuthenticationNotAllowed\",\n            status_code=400,\n            context=context,\n        )\n\n\nclass ClaudeStreamingError(AppError):\n    def __init__(\n        self,\n        error_type: str,\n        
error_message: str,\n        context: Optional[Dict[str, Any]] = None,\n    ):\n        _context = context.copy() if context else {}\n        _context.update({\n            \"error_type\": error_type,\n            \"error_message\": error_message,\n        })\n        super().__init__(\n            error_code=503500,\n            message_key=\"processors.nonStreamingResponseProcessor.streamingError\",\n            status_code=503,\n            context=_context,\n            retryable=True,\n        )\n\n\nclass NoMessageError(AppError):\n    def __init__(self, context: Optional[Dict[str, Any]] = None):\n        super().__init__(\n            error_code=503501,\n            message_key=\"processors.nonStreamingResponseProcessor.noMessage\",\n            status_code=503,\n            context=context,\n            retryable=True,\n        )\n"
  },
  {
    "path": "app/core/external/claude_client.py",
    "content": "import json\nfrom loguru import logger\nfrom datetime import datetime, timezone\nfrom typing import Optional, Dict, Any\nfrom urllib.parse import urljoin\nfrom uuid import uuid4\n\nfrom app.core.http_client import (\n    create_session,\n    Response,\n    AsyncSession,\n)\n\nfrom app.core.config import settings\nfrom app.core.exceptions import (\n    ClaudeAuthenticationError,\n    ClaudeRateLimitedError,\n    CloudflareBlockedError,\n    OrganizationDisabledError,\n    ClaudeHttpError,\n)\nfrom app.models.internal import UploadResponse\nfrom app.core.account import Account\n\n\nclass ClaudeWebClient:\n    \"\"\"Client for interacting with Claude.ai.\"\"\"\n\n    def __init__(self, account: Account):\n        self.account = account\n        self.session: Optional[AsyncSession] = None\n        self.endpoint = settings.claude_ai_url.encoded_string().rstrip(\"/\")\n\n    async def initialize(self):\n        \"\"\"Initialize the client session.\"\"\"\n        self.session = create_session(\n            timeout=settings.request_timeout,\n            impersonate=\"chrome\",\n            proxy=settings.proxy_url,\n            follow_redirects=False,\n        )\n\n    async def cleanup(self):\n        \"\"\"Clean up resources.\"\"\"\n        if self.session:\n            await self.session.close()\n\n    def _build_headers(\n        self, cookie: str, conv_uuid: Optional[str] = None\n    ) -> Dict[str, str]:\n        \"\"\"Build request headers.\"\"\"\n        headers = {\n            \"Accept\": \"text/event-stream\",\n            \"Accept-Language\": \"en-US,en;q=0.9\",\n            \"Cache-Control\": \"no-cache\",\n            \"Cookie\": cookie,\n            \"Origin\": self.endpoint,\n            \"Referer\": f\"{self.endpoint}/new\",\n            \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36\",\n        }\n\n        if conv_uuid:\n            headers[\"Referer\"] = f\"{self.endpoint}/chat/{conv_uuid}\"\n\n        
return headers\n\n    async def _request(\n        self,\n        method: str,\n        url: str,\n        conv_uuid: Optional[str] = None,\n        stream=None,\n        **kwargs,\n    ) -> Response:\n        \"\"\"Make HTTP request with error handling.\"\"\"\n        if not self.session:\n            await self.initialize()\n\n        with self.account as account:\n            cookie_value = account.cookie_value\n            headers = self._build_headers(cookie_value, conv_uuid)\n            kwargs[\"headers\"] = {**headers, **kwargs.get(\"headers\", {})}\n            response: Response = await self.session.request(\n                method=method, url=url, stream=stream, **kwargs\n            )\n\n            if response.status_code < 300:\n                return response\n\n            if response.status_code == 302:\n                raise CloudflareBlockedError()\n\n            try:\n                error_data = await response.json()\n                error_body = error_data.get(\"error\", {})\n                error_message = error_body.get(\"message\", \"Unknown error\")\n                error_type = error_body.get(\"type\", \"unknown\")\n            except Exception:\n                error_message = f\"HTTP {response.status_code} error with empty response\"\n                error_type = \"empty_response\"\n\n            if (\n                response.status_code == 400\n                and error_message == \"This organization has been disabled.\"\n            ):\n                raise OrganizationDisabledError()\n\n            if response.status_code == 403 and error_message == \"Invalid authorization\":\n                raise ClaudeAuthenticationError()\n\n            if response.status_code == 429:\n                try:\n                    error_message_data = json.loads(error_message)\n                    resets_at = error_message_data.get(\"resetsAt\")\n                    if resets_at and isinstance(resets_at, int):\n                        reset_time = 
datetime.fromtimestamp(resets_at, tz=timezone.utc)\n                        logger.error(f\"Rate limit exceeded, resets at: {reset_time}\")\n                        raise ClaudeRateLimitedError(resets_at=reset_time)\n                except json.JSONDecodeError:\n                    pass\n\n            raise ClaudeHttpError(\n                url=url,\n                status_code=response.status_code,\n                error_type=error_type,\n                error_message=error_message,\n            )\n\n    async def create_conversation(self) -> \"tuple[str, Optional[str]]\":\n        \"\"\"Create a new conversation; returns (conversation UUID, paprika mode).\"\"\"\n        url = urljoin(\n            self.endpoint,\n            f\"/api/organizations/{self.account.organization_uuid}/chat_conversations\",\n        )\n\n        uuid = uuid4()\n\n        payload = {\n            \"name\": \"Hello World!\",\n            \"uuid\": str(uuid),\n        }\n        response = await self._request(\"POST\", url, json=payload)\n\n        data = await response.json()\n        conv_uuid = data.get(\"uuid\")\n        paprika_mode = data.get(\"settings\", {}).get(\"paprika_mode\")\n        logger.info(f\"Created conversation: {conv_uuid}\")\n\n        return conv_uuid, paprika_mode\n\n    async def set_paprika_mode(self, conv_uuid: str, mode: Optional[str]) -> None:\n        \"\"\"Set the conversation mode.\"\"\"\n        url = urljoin(\n            self.endpoint,\n            f\"/api/organizations/{self.account.organization_uuid}/chat_conversations/{conv_uuid}\",\n        )\n        payload = {\"settings\": {\"paprika_mode\": mode}}\n        await self._request(\"PUT\", url, json=payload)\n        logger.debug(f\"Set conversation {conv_uuid} mode: {mode}\")\n\n    async def upload_file(\n        self, file_data: bytes, filename: str, content_type: str\n    ) -> str:\n        \"\"\"Upload a file and return file UUID.\"\"\"\n        url = urljoin(self.endpoint, f\"/api/{self.account.organization_uuid}/upload\")\n        files = {\"file\": (filename, 
file_data, content_type)}\n\n        response = await self._request(\"POST\", url, files=files)\n\n        data = UploadResponse.model_validate(await response.json())\n        return data.file_uuid\n\n    async def send_message(self, payload: Dict[str, Any], conv_uuid: str) -> Response:\n        \"\"\"Send a message and return the response.\"\"\"\n        url = urljoin(\n            self.endpoint,\n            f\"/api/organizations/{self.account.organization_uuid}/chat_conversations/{conv_uuid}/completion\",\n        )\n\n        headers = {\n            \"Accept\": \"text/event-stream\",\n        }\n\n        response = await self._request(\n            \"POST\", url, conv_uuid=conv_uuid, json=payload, headers=headers, stream=True\n        )\n\n        return response\n\n    async def send_tool_result(self, payload: Dict[str, Any], conv_uuid: str):\n        \"\"\"Send tool result to Claude.ai.\"\"\"\n        url = urljoin(\n            self.endpoint,\n            f\"/api/organizations/{self.account.organization_uuid}/chat_conversations/{conv_uuid}/tool_result\",\n        )\n\n        await self._request(\"POST\", url, conv_uuid=conv_uuid, json=payload)\n\n    async def delete_conversation(self, conv_uuid: str) -> None:\n        \"\"\"Delete a conversation.\"\"\"\n        if not conv_uuid:\n            return\n\n        url = urljoin(\n            self.endpoint,\n            f\"/api/organizations/{self.account.organization_uuid}/chat_conversations/{conv_uuid}\",\n        )\n        try:\n            await self._request(\"DELETE\", url, conv_uuid=conv_uuid)\n            logger.info(f\"Deleted conversation: {conv_uuid}\")\n        except Exception as e:\n            logger.warning(f\"Failed to delete conversation: {e}\")\n"
  },
  {
    "path": "app/core/http_client.py",
    "content": "\"\"\"HTTP client abstraction layer that supports both curl_cffi and httpx.\"\"\"\n\nfrom abc import ABC, abstractmethod\nfrom typing import Optional, Dict, Any, Tuple, AsyncIterator\nfrom tenacity import (\n    retry,\n    retry_if_exception_type,\n    stop_after_attempt,\n    wait_fixed,\n)\nfrom loguru import logger\nimport json\n\nfrom app.core.config import settings\nfrom app.utils.retry import log_before_sleep\n\ntry:\n    import rnet\n    from rnet import Client as RnetClient, Method as RnetMethod\n    from rnet.exceptions import RequestError as RnetRequestError\n\n    RNET_AVAILABLE = True\nexcept ImportError:\n    RNET_AVAILABLE = False\n\ntry:\n    from curl_cffi.requests import (\n        AsyncSession as CurlAsyncSession,\n        Response as CurlResponse,\n    )\n    from curl_cffi.requests.exceptions import RequestException as CurlRequestException\n    import curl_cffi\n\n    CURL_CFFI_AVAILABLE = True\nexcept ImportError:\n    CURL_CFFI_AVAILABLE = False\n\n# Always try to import httpx as fallback\ntry:\n    import httpx\n\n    HTTPX_AVAILABLE = True\nexcept ImportError:\n    HTTPX_AVAILABLE = False\n\nif not RNET_AVAILABLE and not CURL_CFFI_AVAILABLE and not HTTPX_AVAILABLE:\n    raise ImportError(\n        \"Neither rnet, curl_cffi nor httpx is installed. 
Please install at least one of them.\"\n    )\n\n\nclass Response(ABC):\n    \"\"\"Abstract response class.\"\"\"\n\n    @property\n    @abstractmethod\n    def status_code(self) -> int:\n        \"\"\"Get response status code.\"\"\"\n        pass\n\n    @abstractmethod\n    async def json(self) -> Any:\n        \"\"\"Parse response as JSON.\"\"\"\n        pass\n\n    @property\n    @abstractmethod\n    def headers(self) -> Dict[str, str]:\n        \"\"\"Get response headers.\"\"\"\n        pass\n\n    @abstractmethod\n    def aiter_bytes(self, chunk_size: Optional[int] = None) -> AsyncIterator[bytes]:\n        \"\"\"Iterate over response bytes.\"\"\"\n        pass\n\n\nclass CurlResponseWrapper(Response):\n    \"\"\"curl_cffi response wrapper.\"\"\"\n\n    def __init__(self, response: \"CurlResponse\", stream: bool = False):\n        self._response = response\n        self._stream = stream\n\n    @property\n    def status_code(self) -> int:\n        return self._response.status_code\n\n    async def json(self) -> Any:\n        if self._stream:\n            content = \"\"\n            async for chunk in self._response.aiter_content():\n                content += chunk.decode(\"utf-8\")\n            return json.loads(content)\n        else:\n            return self._response.json()\n\n    @property\n    def headers(self) -> Dict[str, str]:\n        return self._response.headers\n\n    async def aiter_bytes(\n        self, chunk_size: Optional[int] = None\n    ) -> AsyncIterator[bytes]:\n        async for chunk in self._response.aiter_content(chunk_size):\n            yield chunk\n        await self._response.aclose()\n\n\nclass HttpxResponse(Response):\n    \"\"\"httpx response wrapper.\"\"\"\n\n    def __init__(self, response: httpx.Response):\n        self._response = response\n\n    @property\n    def status_code(self) -> int:\n        return self._response.status_code\n\n    async def json(self) -> Any:\n        await self._response.aread()\n        return 
self._response.json()\n\n    @property\n    def headers(self) -> Dict[str, str]:\n        return self._response.headers\n\n    async def aiter_bytes(\n        self, chunk_size: Optional[int] = None\n    ) -> AsyncIterator[bytes]:\n        async for chunk in self._response.aiter_bytes(chunk_size):\n            yield chunk\n        await self._response.aclose()\n\n\nif RNET_AVAILABLE:\n\n    class RnetResponse(Response):\n        \"\"\"rnet response wrapper.\"\"\"\n\n        def __init__(self, response: \"rnet.Response\"):\n            self._response = response\n\n        @property\n        def status_code(self) -> int:\n            return self._response.status.as_int()\n\n        async def json(self) -> Any:\n            return await self._response.json()\n\n        @property\n        def headers(self) -> Dict[str, str]:\n            headers_dict = {}\n            for key, value in self._response.headers:\n                key_str = key.decode(\"utf-8\") if isinstance(key, bytes) else key\n                value_str = value.decode(\"utf-8\") if isinstance(value, bytes) else value\n                headers_dict[key_str] = value_str\n            return headers_dict\n\n        async def aiter_bytes(\n            self, chunk_size: Optional[int] = None\n        ) -> AsyncIterator[bytes]:\n            async with self._response.stream() as streamer:\n                async for chunk in streamer:\n                    yield chunk\n            await self._response.close()\n\n\nclass AsyncSession(ABC):\n    \"\"\"Abstract async session class.\"\"\"\n\n    @abstractmethod\n    async def request(\n        self,\n        method: str,\n        url: str,\n        headers: Optional[Dict[str, str]] = None,\n        json: Optional[Any] = None,\n        data: Optional[Any] = None,\n        stream: bool = False,\n        **kwargs,\n    ) -> Response:\n        \"\"\"Make an HTTP request.\"\"\"\n        pass\n\n    @abstractmethod\n    async def close(self):\n        \"\"\"Close the 
session.\"\"\"\n        pass\n\n    async def __aenter__(self):\n        return self\n\n    async def __aexit__(self, exc_type, exc_val, exc_tb):\n        await self.close()\n\n\nif CURL_CFFI_AVAILABLE:\n\n    class CurlAsyncSessionWrapper(AsyncSession):\n        \"\"\"curl_cffi async session wrapper.\"\"\"\n\n        def __init__(\n            self,\n            timeout: int = settings.request_timeout,\n            impersonate: str = \"chrome\",\n            proxy: Optional[str] = settings.proxy_url,\n            follow_redirects: bool = True,\n        ):\n            self._session = CurlAsyncSession(\n                timeout=timeout,\n                impersonate=impersonate,\n                proxy=proxy,\n                allow_redirects=follow_redirects,\n            )\n\n        def process_files(self, files: dict) -> curl_cffi.CurlMime:\n            # Create multipart form\n            multipart = curl_cffi.CurlMime()\n\n            # Handle different file formats\n            if isinstance(files, dict):\n                for field_name, file_info in files.items():\n                    if isinstance(file_info, tuple):\n                        # Format: {\"field\": (filename, data, content_type)}\n                        if len(file_info) >= 3:\n                            filename, file_data, content_type = file_info[:3]\n                        elif len(file_info) == 2:\n                            filename, file_data = file_info\n                            content_type = \"application/octet-stream\"\n                        else:\n                            raise ValueError(\n                                f\"Invalid file tuple format for field {field_name}\"\n                            )\n\n                        multipart.addpart(\n                            name=field_name,\n                            content_type=content_type,\n                            filename=filename,\n                            data=file_data,\n                        )\n    
                else:\n                        # Simple format: {\"field\": data}\n                        multipart.addpart(\n                            name=field_name,\n                            data=file_info,\n                        )\n\n            return multipart\n\n        @retry(\n            stop=stop_after_attempt(settings.request_retries),\n            wait=wait_fixed(settings.request_retry_interval),\n            retry=retry_if_exception_type(CurlRequestException),\n            before_sleep=log_before_sleep,\n            reraise=True,\n        )\n        async def request(\n            self,\n            method: str,\n            url: str,\n            headers: Optional[Dict[str, str]] = None,\n            json: Optional[Any] = None,\n            data: Optional[Any] = None,\n            stream: bool = False,\n            **kwargs,\n        ) -> Response:\n            logger.debug(f\"Making {method} request to {url}\")\n\n            # Handle file uploads - convert files parameter to multipart\n            files = kwargs.pop(\"files\", None)\n\n            multipart = None\n\n            if files:\n                multipart = self.process_files(files)\n                kwargs[\"multipart\"] = multipart\n\n            try:\n                response = await self._session.request(\n                    method=method,\n                    url=url,\n                    headers=headers,\n                    json=json,\n                    data=data,\n                    stream=stream,\n                    **kwargs,\n                )\n                return CurlResponseWrapper(response, stream=stream)\n            finally:\n                if multipart:\n                    multipart.close()\n\n        async def close(self):\n            await self._session.close()\n\n\nif RNET_AVAILABLE:\n\n    class RnetAsyncSession(AsyncSession):\n        \"\"\"rnet async session wrapper.\"\"\"\n\n        def __init__(\n            self,\n            timeout: int = 
settings.request_timeout,\n            impersonate: str = \"chrome\",\n            proxy: Optional[str] = settings.proxy_url,\n            follow_redirects: bool = True,\n        ):\n            # Map impersonate string to rnet Emulation enum\n            emulation_map = {\n                \"chrome\": rnet.Emulation.Chrome142,\n                \"firefox\": rnet.Emulation.Firefox136,\n                \"safari\": rnet.Emulation.Safari18,\n                \"edge\": rnet.Emulation.Edge134,\n            }\n\n            # Use Chrome as default if not found in map\n            rnet_emulation = emulation_map.get(\n                impersonate.lower(), rnet.Emulation.Chrome142\n            )\n\n            # Create proxy list if proxy is provided\n            proxies = None\n            if proxy:\n                proxies = [rnet.Proxy.all(proxy)]\n\n            self._client = RnetClient(\n                emulation=rnet_emulation,\n                timeout=timeout,\n                proxies=proxies,\n                allow_redirects=follow_redirects,\n            )\n\n        @retry(\n            stop=stop_after_attempt(settings.request_retries),\n            wait=wait_fixed(settings.request_retry_interval),\n            retry=retry_if_exception_type(RnetRequestError),\n            before_sleep=log_before_sleep,\n            reraise=True,\n        )\n        async def request(\n            self,\n            method: str,\n            url: str,\n            headers: Optional[Dict[str, str]] = None,\n            json: Optional[Any] = None,\n            data: Optional[Any] = None,\n            stream: bool = False,\n            **kwargs,\n        ) -> Response:\n            logger.debug(f\"Making {method} request to {url}\")\n\n            # Map method string to rnet Method enum\n            method_map = {\n                \"GET\": RnetMethod.GET,\n                \"POST\": RnetMethod.POST,\n                \"PUT\": RnetMethod.PUT,\n                \"DELETE\": RnetMethod.DELETE,\n 
               \"PATCH\": RnetMethod.PATCH,\n                \"HEAD\": RnetMethod.HEAD,\n                \"OPTIONS\": RnetMethod.OPTIONS,\n                \"TRACE\": RnetMethod.TRACE,\n            }\n\n            rnet_method = method_map.get(method.upper(), RnetMethod.GET)\n\n            # Handle file uploads - convert files parameter to multipart\n            files = kwargs.pop(\"files\", None)\n            multipart = None\n\n            if files:\n                # Convert files dict to rnet Multipart\n                parts = []\n                for field_name, file_info in files.items():\n                    if isinstance(file_info, tuple):\n                        # Format: {\"field\": (filename, data, content_type)}\n                        if len(file_info) >= 3:\n                            filename, file_data, content_type = file_info[:3]\n                        elif len(file_info) == 2:\n                            filename, file_data = file_info\n                            content_type = \"application/octet-stream\"\n                        else:\n                            raise ValueError(\n                                f\"Invalid file tuple format for field {field_name}\"\n                            )\n\n                        parts.append(\n                            rnet.Part(\n                                name=field_name,\n                                value=file_data,\n                                filename=filename,\n                                mime=content_type,\n                            )\n                        )\n                    else:\n                        # Simple format: {\"field\": data}\n                        parts.append(rnet.Part(name=field_name, value=file_info))\n\n                multipart = rnet.Multipart(*parts)\n                kwargs[\"multipart\"] = multipart\n\n            request_kwargs = {}\n            if headers:\n                request_kwargs[\"headers\"] = headers\n            if json is 
not None:\n                request_kwargs[\"json\"] = json\n            elif data is not None:\n                # rnet uses 'form' for form data, 'body' for raw data\n                if isinstance(data, dict) or isinstance(data, list):\n                    request_kwargs[\"form\"] = (\n                        [(k, v) for k, v in data.items()]\n                        if isinstance(data, dict)\n                        else data\n                    )\n                else:\n                    request_kwargs[\"body\"] = data\n\n            request_kwargs.update(kwargs)\n\n            response = await self._client.request(\n                method=rnet_method,\n                url=url,\n                **request_kwargs,\n            )\n\n            return RnetResponse(response)\n\n        async def close(self):\n            # rnet Client doesn't have an explicit close method\n            # The connection pooling is handled internally\n            pass\n\n\nif HTTPX_AVAILABLE:\n\n    class HttpxAsyncSession(AsyncSession):\n        \"\"\"httpx async session wrapper.\"\"\"\n\n        def __init__(\n            self,\n            timeout: int = settings.request_timeout,\n            impersonate: str = \"chrome\",\n            proxy: Optional[str] = settings.proxy_url,\n            follow_redirects: bool = True,\n        ):\n            self._client = httpx.AsyncClient(\n                timeout=timeout,\n                proxy=proxy,\n                follow_redirects=follow_redirects,\n            )\n\n        async def stream(\n            self,\n            method: str,\n            url: str,\n            headers: Optional[Dict[str, str]] = None,\n            json: Optional[Any] = None,\n            data: Optional[Any] = None,\n            **kwargs,\n        ) -> Response:\n            \"\"\"\n            Alternative to `httpx.request()` that streams the response body\n            instead of loading it into memory at once.\n\n            **Parameters**: See 
`httpx.request`.\n\n            See also: [Streaming Responses][0]\n\n            [0]: /quickstart#streaming-responses\n            \"\"\"\n            request = self._client.build_request(\n                method=method,\n                url=url,\n                data=data,\n                json=json,\n                headers=headers,\n                **kwargs,\n            )\n            response = await self._client.send(\n                request=request,\n                stream=True,\n            )\n\n            return response\n\n        @retry(\n            stop=stop_after_attempt(settings.request_retries),\n            wait=wait_fixed(settings.request_retry_interval),\n            retry=retry_if_exception_type(httpx.RequestError),\n            before_sleep=log_before_sleep,\n            reraise=True,\n        )\n        async def request(\n            self,\n            method: str,\n            url: str,\n            headers: Optional[Dict[str, str]] = None,\n            json: Optional[Any] = None,\n            data: Optional[Any] = None,\n            stream: bool = False,\n            **kwargs,\n        ) -> Response:\n            logger.debug(f\"Making {method} request to {url}\")\n            if stream:\n                response = await self.stream(\n                    method=method,\n                    url=url,\n                    headers=headers,\n                    json=json,\n                    data=data,\n                    **kwargs,\n                )\n            else:\n                response = await self._client.request(\n                    method=method,\n                    url=url,\n                    headers=headers,\n                    json=json,\n                    data=data,\n                    **kwargs,\n                )\n\n            return HttpxResponse(response)\n\n        async def close(self):\n            await self._client.aclose()\n\n\ndef create_session(\n    timeout: int = settings.request_timeout,\n    
impersonate: str = \"chrome\",\n    proxy: Optional[str] = settings.proxy_url,\n    follow_redirects: bool = True,\n) -> AsyncSession:\n    \"\"\"Create an async session using the available HTTP client.\n\n    Prefers rnet if available, then curl_cffi, falls back to httpx.\n    \"\"\"\n    if RNET_AVAILABLE:\n        logger.debug(\"Using rnet as HTTP client\")\n        return RnetAsyncSession(\n            timeout=timeout,\n            impersonate=impersonate,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n    elif CURL_CFFI_AVAILABLE:\n        logger.debug(\"Using curl_cffi as HTTP client\")\n        return CurlAsyncSessionWrapper(\n            timeout=timeout,\n            impersonate=impersonate,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n    else:\n        logger.debug(\"Using httpx as HTTP client (rnet and curl_cffi not available)\")\n        return HttpxAsyncSession(\n            timeout=timeout,\n            impersonate=impersonate,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n\n\ndef create_plain_session(\n    timeout: int = settings.request_timeout,\n    proxy: Optional[str] = settings.proxy_url,\n    follow_redirects: bool = True,\n) -> AsyncSession:\n    \"\"\"Create a plain HTTP session WITHOUT browser fingerprinting/impersonation.\n\n    Used for API endpoints (e.g. OAuth token exchange at console.anthropic.com)\n    that reject requests containing browser-injected headers (User-Agent, Origin,\n    TLS fingerprints) with 429 errors.\n\n    Prefers httpx (zero header injection). 
Falls back to curl_cffi or rnet\n    without impersonation if httpx is unavailable.\n    \"\"\"\n    if HTTPX_AVAILABLE:\n        logger.debug(\"Using httpx as plain HTTP client\")\n        return HttpxAsyncSession(\n            timeout=timeout,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n    elif CURL_CFFI_AVAILABLE:\n        logger.debug(\"Using curl_cffi (no impersonation) as plain HTTP client\")\n        return CurlAsyncSessionWrapper(\n            timeout=timeout,\n            impersonate=None,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n    else:\n        logger.debug(\"Using rnet (no impersonation) as plain HTTP client\")\n        return RnetAsyncSession(\n            timeout=timeout,\n            impersonate=None,\n            proxy=proxy,\n            follow_redirects=follow_redirects,\n        )\n\n\nasync def download_image(url: str, timeout: int = 30) -> Tuple[bytes, str]:\n    \"\"\"Download an image from a URL and return content and content type.\n\n    Uses the unified session interface that works with both curl_cffi and httpx.\n    \"\"\"\n    async with create_session(timeout=timeout) as session:\n        response = await session.request(\"GET\", url)\n        content_type = response.headers.get(\"content-type\", \"image/jpeg\")\n\n        # Read the response content\n        content = b\"\"\n        async for chunk in response.aiter_bytes():\n            content += chunk\n\n        return content, content_type\n\n\n# Export the appropriate exception class\nif RNET_AVAILABLE:\n    RequestException = RnetRequestError\nelif CURL_CFFI_AVAILABLE:\n    RequestException = CurlRequestException\nelse:\n    RequestException = httpx.RequestError\n"
  },
  {
    "path": "app/core/static.py",
    "content": "from fastapi import FastAPI, HTTPException\nfrom fastapi.responses import FileResponse\nfrom fastapi.staticfiles import StaticFiles\nfrom loguru import logger\n\nfrom app.core.config import settings\n\n\ndef register_static_routes(app: FastAPI):\n    \"\"\"Register static file routes for the application.\"\"\"\n\n    if settings.static_folder.exists():\n        app.mount(\n            \"/assets\",\n            StaticFiles(directory=str(settings.static_folder / \"assets\")),\n            name=\"assets\",\n        )\n\n        # Serve index.html for SPA routes\n        @app.get(\"/{full_path:path}\")\n        async def serve_spa(full_path: str):\n            \"\"\"Serve index.html for all non-API routes (SPA support).\"\"\"\n            index_path = settings.static_folder / \"index.html\"\n            if index_path.exists():\n                return FileResponse(str(index_path))\n            raise HTTPException(status_code=404, detail=\"Frontend not built\")\n    else:\n        logger.warning(\n            \"Static files directory not found. Run 'pnpm build' in the front directory to build the frontend.\"\n        )\n"
  },
  {
    "path": "app/dependencies/__init__.py",
    "content": ""
  },
  {
    "path": "app/dependencies/auth.py",
    "content": "from typing import Optional, Annotated\nfrom loguru import logger\nfrom fastapi import Depends, Header\nimport secrets\n\nfrom app.core.config import settings\nfrom app.core.exceptions import InvalidAPIKeyError\n\n_temp_admin_api_key: Optional[str] = None\n\nif not settings.admin_api_keys:\n    _temp_admin_api_key = f\"sk-admin-{secrets.token_urlsafe(32)}\"\n    logger.warning(\n        f\"No admin API keys configured. Generated temporary admin API key: {_temp_admin_api_key}\"\n    )\n    logger.warning(\n        \"This is a temporary key and will not be saved. Please configure admin API keys in settings.\"\n    )\n\n\nasync def get_api_key(\n    x_api_key: Annotated[Optional[str], Header()] = None,\n    authorization: Annotated[Optional[str], Header()] = None,\n) -> str:\n    # Check X-API-Key header\n    api_key = x_api_key\n\n    # Check Authorization header\n    if not api_key and authorization:\n        if authorization.startswith(\"Bearer \"):\n            api_key = authorization[7:]\n\n    if not api_key:\n        raise InvalidAPIKeyError()\n\n    return api_key\n\n\nAPIKeyDep = Annotated[str, Depends(get_api_key)]\n\n\nasync def verify_api_key(\n    api_key: APIKeyDep,\n) -> str:\n    # Verify against configured keys\n    valid_keys = settings.api_keys + settings.admin_api_keys + [_temp_admin_api_key]\n\n    if not valid_keys:\n        logger.error(\"No API keys configured, Please configure at least one API key.\")\n        raise InvalidAPIKeyError()\n\n    if api_key not in valid_keys:\n        raise InvalidAPIKeyError()\n\n    return api_key\n\n\nAuthDep = Annotated[str, Depends(verify_api_key)]\n\n\nasync def verify_admin_api_key(\n    api_key: APIKeyDep,\n) -> str:\n    # Verify against configured admin keys\n    valid_keys = settings.admin_api_keys + [_temp_admin_api_key]\n\n    if not valid_keys:\n        logger.error(\n            \"No admin API keys configured, Please configure at least one admin API key.\"\n        )\n        raise 
InvalidAPIKeyError()\n\n    if api_key not in valid_keys:\n        raise InvalidAPIKeyError()\n\n    return api_key\n\n\nAdminAuthDep = Annotated[str, Depends(verify_admin_api_key)]\n"
  },
  {
    "path": "app/locales/en.json",
    "content": "{\n  \"global\": {\n    \"internalServerError\": \"An internal server error occurred. Please try again later.\",\n    \"noAPIKeyProvided\": \"No API key provided. Please include an API key in the request.\",\n    \"invalidAPIKey\": \"Invalid API key. Please check your API key and try again.\"\n  },\n  \"accountManager\": {\n    \"noAccountsAvailable\": \"No accounts are currently available. Please try again later.\"\n  },\n  \"oauthService\": {\n    \"oauthExchangeError\": \"Failed to exchange authorization code for tokens.\",\n    \"organizationInfoError\": \"Failed to get organization Info: {reason}\",\n    \"cookieAuthorizationError\": \"Failed to authorize with cookie: {reason}\",\n    \"oauthAuthenticationNotAllowed\": \"OAuth authentication is not allowed for this organization. Only Pro and Max accounts support OAuth authentication.\"\n  },\n  \"claudeClient\": {\n    \"claudeRateLimited\": \"Claude AI rate limit exceeded. Please try again after {resets_at}.\",\n    \"cloudflareBlocked\": \"Request blocked by Cloudflare. Please check your IP address.\",\n    \"organizationDisabled\": \"Your Claude AI account has been disabled.\",\n    \"httpError\": \"HTTP error occurred when calling Claude AI: {error_type} - {error_message} (Status: {status_code})\",\n    \"invalidModelName\": \"Invalid model name provided. Please ensure you have access to model {model_name}.\",\n    \"authenticationError\": \"Authentication error. Please check your Claude Cookie or OAuth credentials, and ensure you have installed the curl dependency and are not in a Termux environment.\"\n  },\n  \"messageProcessor\": {\n    \"noValidMessages\": \"No valid messages found in the request.\",\n    \"externalImageDownloadError\": \"Failed to download external image from: {url}\",\n    \"externalImageNotAllowed\": \"External images are not allowed: {url}\"\n  },\n  \"pipeline\": {\n    \"noResponse\": \"No response received from the service. 
Please try again.\"\n  },\n  \"processors\": {\n    \"nonStreamingResponseProcessor\": {\n      \"streamingError\": \"Streaming error occurred: {error_type} - {error_message}\",\n      \"noMessage\": \"No message received in the response.\"\n    }\n  }\n}\n"
  },
  {
    "path": "app/locales/zh.json",
    "content": "{\n  \"global\": {\n    \"internalServerError\": \"服务器内部错误。请稍后重试。\",\n    \"noAPIKeyProvided\": \"未提供 API 密钥。请在请求中包含 API 密钥。\",\n    \"invalidAPIKey\": \"无效的 API 密钥。请检查您的 API 密钥并重试。\"\n  },\n  \"accountManager\": {\n    \"noAccountsAvailable\": \"当前没有可用的账户。请稍后重试。\"\n  },\n  \"oauthService\": {\n    \"oauthExchangeError\": \"无法将授权代码兑换为令牌：{reason}\",\n    \"organizationInfoError\": \"无法获取组织信息：{reason}\",\n    \"cookieAuthorizationError\": \"无法使用 Cookie 进行授权：{reason}\",\n    \"oauthAuthenticationNotAllowed\": \"此组织不允许 OAuth 认证。仅有 Pro 和 Max 账户支持 OAuth 认证。\"\n  },\n  \"claudeClient\": {\n    \"claudeRateLimited\": \"Claude API 速率限制已超出。请在 {resets_at} 后重试。\",\n    \"cloudflareBlocked\": \"请求被 Cloudflare 阻止。请检查您的连接。\",\n    \"organizationDisabled\": \"您的 Claude AI 账户已被禁用。\",\n    \"httpError\": \"请求 Claude AI 时发生 HTTP 错误：{error_type} - {error_message}（状态码：{status_code}）\",\n    \"invalidModelName\": \"提供的模型名称无效。请确保您有权访问 {model_name} 模型。\",\n    \"authenticationError\": \"身份验证错误。请检查您的 Claude Cookie 或 OAuth 凭证；并确保安装了 curl 依赖且不在 Termux 环境下。\"\n  },\n  \"messageProcessor\": {\n    \"noValidMessages\": \"请求中未找到有效消息。\",\n    \"externalImageDownloadError\": \"无法从以下地址下载外部图片：{url}\",\n    \"externalImageNotAllowed\": \"不允许使用外部图片：{url}\"\n  },\n  \"pipeline\": {\n    \"noResponse\": \"未收到服务响应。请重试。\"\n  },\n  \"processors\": {\n    \"nonStreamingResponseProcessor\": {\n      \"streamingError\": \"流式传输中收到错误：{error_type} - {error_message}\",\n      \"noMessage\": \"响应中未收到消息。\"\n    }\n  }\n}\n"
  },
  {
    "path": "app/main.py",
    "content": "from loguru import logger\nfrom contextlib import asynccontextmanager\nfrom fastapi import FastAPI\nfrom fastapi.middleware.cors import CORSMiddleware\n\nfrom app.api.main import api_router\nfrom app.core.config import settings\nfrom app.core.error_handler import app_exception_handler\nfrom app.core.exceptions import AppError\nfrom app.core.static import register_static_routes\nfrom app.utils.logger import configure_logger\nfrom app.services.account import account_manager\nfrom app.services.session import session_manager\nfrom app.services.tool_call import tool_call_manager\nfrom app.services.cache import cache_service\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    \"\"\"Application lifespan manager.\"\"\"\n    logger.info(\"Starting Clove...\")\n\n    configure_logger()\n\n    # Load accounts\n    account_manager.load_accounts()\n\n    for cookie in settings.cookies:\n        await account_manager.add_account(cookie_value=cookie)\n\n    # Start tasks\n    await account_manager.start_task()\n    await session_manager.start_cleanup_task()\n    await tool_call_manager.start_cleanup_task()\n    await cache_service.start_cleanup_task()\n\n    yield\n\n    logger.info(\"Shutting down Clove...\")\n\n    # Save accounts\n    account_manager.save_accounts()\n\n    # Stop tasks\n    await account_manager.stop_task()\n    await session_manager.cleanup_all()\n    await tool_call_manager.cleanup_all()\n    await cache_service.cleanup_all()\n\n\napp = FastAPI(\n    title=\"Clove\",\n    description=\"A Claude.ai reverse proxy\",\n    version=\"0.1.0\",\n    lifespan=lifespan,\n)\n\napp.add_middleware(\n    CORSMiddleware,\n    allow_origins=[\"*\"],\n    allow_credentials=True,\n    allow_methods=[\"*\"],\n    allow_headers=[\"*\"],\n)\n\n# Include routers\napp.include_router(api_router)\n\n# Static files\nregister_static_routes(app)\n\n# Exception handlers\napp.add_exception_handler(AppError, app_exception_handler)\n\n\n# Health 
check\n@app.get(\"/health\")\nasync def health():\n    \"\"\"Health check endpoint.\"\"\"\n    stats = await account_manager.get_status()\n    return {\"status\": \"healthy\" if stats[\"valid_accounts\"] > 0 else \"degraded\"}\n\n\ndef main():\n    \"\"\"Main entry point for the application.\"\"\"\n    import uvicorn\n\n    uvicorn.run(\n        \"app.main:app\",\n        host=settings.host,\n        port=settings.port,\n        reload=False,\n    )\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "app/models/__init__.py",
    "content": ""
  },
  {
    "path": "app/models/claude.py",
    "content": "from typing import Optional, List, Union, Literal, Dict, Any\nfrom pydantic import BaseModel, ConfigDict, Field, model_validator\nfrom enum import Enum\n\n\nclass Role(str, Enum):\n    USER = \"user\"\n    ASSISTANT = \"assistant\"\n\n\nclass ImageType(str, Enum):\n    JPEG = \"image/jpeg\"\n    PNG = \"image/png\"\n    GIF = \"image/gif\"\n    WEBP = \"image/webp\"\n\n\n# Image sources\nclass Base64ImageSource(BaseModel):\n    type: Literal[\"base64\"] = \"base64\"\n    media_type: ImageType = Field(..., description=\"MIME type of the image\")\n    data: str = Field(..., description=\"Base64 encoded image data\")\n\n\nclass URLImageSource(BaseModel):\n    type: Literal[\"url\"] = \"url\"\n    url: str = Field(..., description=\"URL of the image\")\n\n\nclass FileImageSource(BaseModel):\n    type: Literal[\"file\"] = \"file\"\n    file_uuid: str = Field(..., description=\"UUID of the uploaded file\")\n\n\n# Web search result\nclass WebSearchResult(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"web_search_result\"]\n    title: str\n    url: str\n    encrypted_content: str\n    page_age: Optional[str] = None\n\n\n# Cache control\nclass CacheControl(BaseModel):\n    type: Literal[\"ephemeral\"]\n\n\n# Content types\nclass TextContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"text\"]\n    text: str\n    cache_control: Optional[CacheControl] = None\n\n\nclass ImageContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"image\"]\n    source: Base64ImageSource | URLImageSource | FileImageSource\n    cache_control: Optional[CacheControl] = None\n\n\nclass ThinkingContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"thinking\"]\n    thinking: str\n\n\n# redacted_thinking 块：API 可能返回被审查的思考内容\nclass RedactedThinkingContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: 
Literal[\"redacted_thinking\"]\n    data: str\n\n\nclass ToolUseContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"tool_use\"]\n    id: str\n    name: str\n    input: Dict[str, Any]\n    cache_control: Optional[CacheControl] = None\n\n\nclass ToolResultContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"tool_result\"]\n    tool_use_id: str\n    content: str | List[TextContent | ImageContent]\n    is_error: Optional[bool] = False\n    cache_control: Optional[CacheControl] = None\n\n\nclass ServerToolUseContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"server_tool_use\"]\n    id: str\n    name: str\n    input: Dict[str, Any]\n    cache_control: Optional[CacheControl] = None\n\n\nclass WebSearchToolResultContent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"web_search_tool_result\"]\n    tool_use_id: str\n    content: List[WebSearchResult]\n    cache_control: Optional[CacheControl] = None\n\n\nContentBlock = Union[\n    TextContent,\n    ImageContent,\n    ThinkingContent,\n    RedactedThinkingContent,\n    ToolUseContent,\n    ToolResultContent,\n    ServerToolUseContent,\n    WebSearchToolResultContent,\n]\n\n\nclass InputMessage(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    role: Role\n    content: Union[str, List[ContentBlock]]\n\n\nclass ThinkingOptions(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"enabled\", \"disabled\", \"adaptive\"] = \"disabled\"\n    budget_tokens: Optional[int] = None\n\n\nclass ToolChoice(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"auto\", \"any\", \"tool\", \"none\"] = \"auto\"\n    name: Optional[str] = None\n    disable_parallel_tool_use: Optional[bool] = None\n\n\nclass CustomToolSpec(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    description: Optional[str] = None\n    
input_schema: Optional[Any] = None\n\n\nclass Tool(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Optional[str] = None\n    name: Optional[str] = None\n    input_schema: Optional[Any] = None\n    description: Optional[str] = None\n    custom: Optional[CustomToolSpec] = None\n\n\nclass OutputConfig(BaseModel):\n    \"\"\"Output configuration (effort, format, etc). effort and structured outputs are now GA.\"\"\"\n\n    model_config = ConfigDict(extra=\"allow\")\n    effort: Optional[Literal[\"low\", \"medium\", \"high\", \"max\"]] = None\n\n\nclass OutputFormat(BaseModel):\n    \"\"\"Output format for structured outputs (deprecated, use output_config.format instead).\"\"\"\n\n    model_config = ConfigDict(extra=\"allow\", populate_by_name=True, serialize_by_alias=True)\n    type: Literal[\"json_schema\"]\n    schema_: Optional[Dict[str, Any]] = Field(default=None, alias=\"schema\")\n\n\nclass ServerToolUsage(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    web_search_requests: Optional[int] = None\n\n\nclass Usage(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    input_tokens: int\n    output_tokens: int\n    cache_creation_input_tokens: Optional[int] = 0\n    cache_read_input_tokens: Optional[int] = 0\n    server_tool_use: Optional[ServerToolUsage] = None\n\n\nclass MessagesAPIRequest(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    model: str = Field(default=\"claude-opus-4-20250514\")\n    messages: List[InputMessage]\n    max_tokens: int = Field(default=8192, ge=1)\n    system: Optional[str | List[TextContent]] = None\n    temperature: Optional[float] = Field(default=None, ge=0, le=1)\n    top_p: Optional[float] = Field(default=None, ge=0, le=1)\n    top_k: Optional[int] = Field(default=None, ge=0)\n    stop_sequences: Optional[List[str]] = None\n    stream: Optional[bool] = False\n    metadata: Optional[Dict[str, Any]] = None\n    thinking: Optional[ThinkingOptions] = None\n    tool_choice: 
Optional[ToolChoice] = None\n    tools: Optional[List[Tool]] = None\n    output_config: Optional[OutputConfig] = None\n    output_format: Optional[OutputFormat] = None\n\n    @model_validator(mode=\"after\")\n    def validate_thinking_tokens(self) -> \"MessagesAPIRequest\":\n        \"\"\"Ensure max_tokens > thinking.budget_tokens when thinking is enabled.\"\"\"\n        if (\n            self.thinking\n            and self.thinking.type == \"enabled\"\n            and self.thinking.budget_tokens is not None\n            and self.max_tokens <= self.thinking.budget_tokens\n        ):\n            self.max_tokens = self.thinking.budget_tokens + 1\n        return self\n\n\nclass Message(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    id: str\n    type: Literal[\"message\"]\n    role: Literal[\"assistant\"]\n    content: List[ContentBlock]\n    model: str\n    stop_reason: Optional[\n        Literal[\n            \"end_turn\",\n            \"max_tokens\",\n            \"stop_sequence\",\n            \"tool_use\",\n            \"pause_turn\",\n            \"refusal\",\n        ]\n    ] = None\n    stop_sequence: Optional[str] = None\n    usage: Optional[Usage] = None\n"
  },
  {
    "path": "app/models/internal.py",
    "content": "from typing import List, Optional\nfrom pydantic import BaseModel, Field\nfrom .claude import Tool\n\n\nclass Attachment(BaseModel):\n    extracted_content: str\n    file_name: str\n    file_type: str\n    file_size: int\n\n    @classmethod\n    def from_text(cls, content: str) -> \"Attachment\":\n        \"\"\"Create text attachment.\"\"\"\n        return cls(\n            extracted_content=content,\n            file_name=\"paste.txt\",\n            file_type=\"txt\",\n            file_size=len(content),\n        )\n\n\nclass ClaudeWebRequest(BaseModel):\n    max_tokens_to_sample: int\n    attachments: List[Attachment]\n    files: List[str] = Field(default_factory=list)\n    model: Optional[str] = None\n    rendering_mode: str = \"messages\"\n    prompt: str = \"\"\n    timezone: str\n    tools: List[Tool] = Field(default_factory=list)\n\n\nclass UploadResponse(BaseModel):\n    file_uuid: str\n"
  },
  {
    "path": "app/models/streaming.py",
    "content": "from typing import Optional, Union, Dict, Any, Literal\nfrom pydantic import BaseModel, RootModel, ConfigDict\n\nfrom .claude import ContentBlock, Message, Usage\n\n\n# Base event types\nclass BaseEvent(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: str\n\n\n# Delta types\nclass TextDelta(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"text_delta\"]\n    text: str\n\n\nclass InputJsonDelta(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"input_json_delta\"]\n    partial_json: str\n\n\nclass ThinkingDelta(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"thinking_delta\"]\n    thinking: str\n\n\nclass SignatureDelta(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: Literal[\"signature_delta\"]\n    signature: str\n\n\nDelta = Union[TextDelta, InputJsonDelta, ThinkingDelta, SignatureDelta]\n\n\nclass MessageDeltaData(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    stop_reason: Optional[\n        Literal[\"end_turn\", \"max_tokens\", \"stop_sequence\", \"tool_use\", \"pause_turn\", \"refusal\"]\n    ] = None\n    stop_sequence: Optional[str] = None\n\n\n# Error model\nclass ErrorInfo(BaseModel):\n    model_config = ConfigDict(extra=\"allow\")\n    type: str\n    message: str\n\n\n# Event models\nclass MessageStartEvent(BaseEvent):\n    type: Literal[\"message_start\"]\n    message: Message\n\n\nclass ContentBlockStartEvent(BaseEvent):\n    type: Literal[\"content_block_start\"]\n    index: int\n    content_block: ContentBlock\n\n\nclass ContentBlockDeltaEvent(BaseEvent):\n    type: Literal[\"content_block_delta\"]\n    index: int\n    delta: Delta\n\n\nclass ContentBlockStopEvent(BaseEvent):\n    type: Literal[\"content_block_stop\"]\n    index: int\n\n\nclass MessageDeltaEvent(BaseEvent):\n    type: Literal[\"message_delta\"]\n    delta: MessageDeltaData\n    usage: Optional[Usage] = 
None\n\n\nclass MessageStopEvent(BaseEvent):\n    type: Literal[\"message_stop\"]\n\n\nclass PingEvent(BaseEvent):\n    type: Literal[\"ping\"]\n\n\nclass ErrorEvent(BaseEvent):\n    type: Literal[\"error\"]\n    error: ErrorInfo\n\n\nclass UnknownEvent(BaseEvent):\n    type: str\n    data: Dict[str, Any]\n\n\n# Union of all streaming event types\nclass StreamingEvent(RootModel):\n    root: Union[\n        MessageStartEvent,\n        ContentBlockStartEvent,\n        ContentBlockDeltaEvent,\n        ContentBlockStopEvent,\n        MessageDeltaEvent,\n        MessageStopEvent,\n        PingEvent,\n        ErrorEvent,\n        UnknownEvent,\n    ]\n"
  },
  {
    "path": "app/processors/__init__.py",
    "content": "from app.processors.base import BaseProcessor, BaseContext\nfrom app.processors.claude_ai import (\n    ClaudeAIContext,\n    TestMessageProcessor,\n    ClaudeWebProcessor,\n    EventParsingProcessor,\n    StreamingResponseProcessor,\n    MessageCollectorProcessor,\n    NonStreamingResponseProcessor,\n    TokenCounterProcessor,\n    ToolResultProcessor,\n    ToolCallEventProcessor,\n    StopSequencesProcessor,\n)\n\n__all__ = [\n    # Base classes\n    \"BaseProcessor\",\n    \"BaseContext\",\n    # Claude AI Pipeline\n    \"ClaudeAIContext\",\n    \"TestMessageProcessor\",\n    \"ClaudeWebProcessor\",\n    \"EventParsingProcessor\",\n    \"StreamingResponseProcessor\",\n    \"MessageCollectorProcessor\",\n    \"NonStreamingResponseProcessor\",\n    \"TokenCounterProcessor\",\n    \"ToolResultProcessor\",\n    \"ToolCallEventProcessor\",\n    \"StopSequencesProcessor\",\n]\n"
  },
  {
    "path": "app/processors/base.py",
    "content": "\"\"\"Base classes for request processing pipeline.\"\"\"\n\nfrom abc import ABC, abstractmethod\nfrom dataclasses import dataclass, field\nfrom typing import Optional\n\nfrom fastapi import Request\nfrom fastapi.responses import StreamingResponse, JSONResponse\n\n\n@dataclass\nclass BaseContext:\n    \"\"\"Base context passed between processors in the pipeline.\"\"\"\n\n    original_request: Request\n    response: Optional[StreamingResponse | JSONResponse] = None\n    metadata: dict = field(\n        default_factory=dict\n    )  # For storing custom data between processors\n\n\nclass BaseProcessor(ABC):\n    \"\"\"Base class for all request processors.\"\"\"\n\n    @abstractmethod\n    async def process(self, context: BaseContext) -> BaseContext:\n        \"\"\"\n        Process the request context.\n\n        Args:\n            context: The processing context\n\n        Returns:\n            Updated context.\n        \"\"\"\n        pass\n\n    @property\n    def name(self) -> str:\n        \"\"\"Get the processor name.\"\"\"\n        return self.__class__.__name__\n"
  },
  {
    "path": "app/processors/claude_ai/__init__.py",
    "content": "from app.processors.claude_ai.context import ClaudeAIContext\nfrom app.processors.claude_ai.pipeline import ClaudeAIPipeline\nfrom app.processors.claude_ai.tavern_test_message_processor import TestMessageProcessor\nfrom app.processors.claude_ai.claude_web_processor import ClaudeWebProcessor\nfrom app.processors.claude_ai.claude_api_processor import ClaudeAPIProcessor\nfrom app.processors.claude_ai.event_parser_processor import EventParsingProcessor\nfrom app.processors.claude_ai.streaming_response_processor import (\n    StreamingResponseProcessor,\n)\nfrom app.processors.claude_ai.message_collector_processor import (\n    MessageCollectorProcessor,\n)\nfrom app.processors.claude_ai.non_streaming_response_processor import (\n    NonStreamingResponseProcessor,\n)\nfrom app.processors.claude_ai.token_counter_processor import TokenCounterProcessor\nfrom app.processors.claude_ai.tool_result_processor import ToolResultProcessor\nfrom app.processors.claude_ai.tool_call_event_processor import ToolCallEventProcessor\nfrom app.processors.claude_ai.stop_sequences_processor import StopSequencesProcessor\nfrom app.processors.claude_ai.model_injector_processor import ModelInjectorProcessor\n\n__all__ = [\n    \"ClaudeAIContext\",\n    \"ClaudeAIPipeline\",\n    \"TestMessageProcessor\",\n    \"ClaudeWebProcessor\",\n    \"ClaudeAPIProcessor\",\n    \"EventParsingProcessor\",\n    \"StreamingResponseProcessor\",\n    \"MessageCollectorProcessor\",\n    \"NonStreamingResponseProcessor\",\n    \"TokenCounterProcessor\",\n    \"ToolResultProcessor\",\n    \"ToolCallEventProcessor\",\n    \"StopSequencesProcessor\",\n    \"ModelInjectorProcessor\",\n]\n"
  },
  {
    "path": "app/processors/claude_ai/claude_api_processor.py",
    "content": "from app.core.http_client import (\n    Response,\n    AsyncSession,\n    create_session,\n)\nfrom datetime import datetime, timedelta, UTC\nfrom typing import Dict\nfrom loguru import logger\nfrom fastapi.responses import StreamingResponse\n\nfrom app.models.claude import MessagesAPIRequest, TextContent\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.services.account import account_manager\nfrom app.services.cache import cache_service\nfrom app.core.exceptions import (\n    ClaudeHttpError,\n    ClaudeRateLimitedError,\n    InvalidModelNameError,\n    NoAccountsAvailableError,\n    OAuthAuthenticationNotAllowedError,\n)\nfrom app.core.config import settings\n\n\nclass ClaudeAPIProcessor(BaseProcessor):\n    \"\"\"Processor that calls Claude Messages API directly using OAuth authentication.\"\"\"\n\n    def __init__(self):\n        self.messages_api_url = (\n            settings.claude_api_baseurl.encoded_string().rstrip(\"/\") + \"/v1/messages\"\n        )\n\n    async def _request_messages_api(\n        self, session: AsyncSession, request_json: str, headers: Dict[str, str]\n    ) -> Response:\n        \"\"\"Make HTTP request with retry mechanism for curl_cffi exceptions.\"\"\"\n        response: Response = await session.request(\n            \"POST\",\n            self.messages_api_url,\n            data=request_json,\n            headers=headers,\n            stream=True,\n        )\n        return response\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Process Claude API request using OAuth authentication.\n\n        Requires:\n            - messages_api_request in context\n\n        Produces:\n            - response in context (StreamingResponse)\n        \"\"\"\n        if context.response:\n            logger.debug(\"Skipping ClaudeAPIProcessor due to existing response\")\n            return context\n\n        if not 
context.messages_api_request:\n            logger.warning(\n                \"Skipping ClaudeAPIProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        self._insert_system_message(context)\n\n        try:\n            # First try to get account from cache service\n            cached_account_id, checkpoints = cache_service.process_messages(\n                context.messages_api_request.model,\n                context.messages_api_request.messages,\n                context.messages_api_request.system,\n            )\n\n            account = None\n            if cached_account_id:\n                account = await account_manager.get_account_by_id(cached_account_id)\n                if account:\n                    logger.info(f\"Using cached account: {cached_account_id[:8]}...\")\n\n            # If no cached account or account not available, get a new one\n            if not account:\n                account = await account_manager.get_account_for_oauth(\n                    is_max=True\n                    if (context.messages_api_request.model in settings.max_models)\n                    else None\n                )\n\n            with account:\n                request_json = context.messages_api_request.model_dump_json(\n                    exclude_none=True\n                )\n                headers = self._prepare_headers(\n                    account.oauth_token.access_token,\n                    context.messages_api_request,\n                    context.original_request,\n                )\n\n                session = create_session(\n                    proxy=settings.proxy_url,\n                    timeout=settings.request_timeout,\n                    impersonate=\"chrome\",\n                    follow_redirects=False,\n                )\n\n                response = await self._request_messages_api(\n                    session, request_json, headers\n                )\n\n                resets_at = 
response.headers.get(\"anthropic-ratelimit-unified-reset\")\n                if resets_at:\n                    try:\n                        resets_at = int(resets_at)\n                        account.resets_at = datetime.fromtimestamp(resets_at, tz=UTC)\n                    except ValueError:\n                        logger.error(\n                            f\"Invalid resets_at format from Claude API: {resets_at}\"\n                        )\n                        account.resets_at = None\n\n                # Handle rate limiting\n                if response.status_code == 429:\n                    next_hour = datetime.now(UTC).replace(\n                        minute=0, second=0, microsecond=0\n                    ) + timedelta(hours=1)\n                    raise ClaudeRateLimitedError(\n                        resets_at=account.resets_at or next_hour\n                    )\n\n                if response.status_code >= 400:\n                    error_data = await response.json()\n\n                    if (\n                        response.status_code == 400\n                        and error_data.get(\"error\", {}).get(\"message\")\n                        == \"system: Invalid model name\"\n                    ):\n                        raise InvalidModelNameError(context.messages_api_request.model)\n\n                    if (\n                        response.status_code == 401\n                        and error_data.get(\"error\", {}).get(\"message\")\n                        == \"OAuth authentication is currently not allowed for this organization.\"\n                    ):\n                        raise OAuthAuthenticationNotAllowedError()\n\n                    logger.error(\n                        f\"Claude API error: {response.status_code} - {error_data}\"\n                    )\n                    raise ClaudeHttpError(\n                        url=self.messages_api_url,\n                        status_code=response.status_code,\n                  
      error_type=error_data.get(\"error\", {}).get(\"type\", \"unknown\"),\n                        error_message=error_data.get(\"error\", {}).get(\n                            \"message\", \"Unknown error\"\n                        ),\n                    )\n\n                async def stream_response():\n                    async for chunk in response.aiter_bytes():\n                        yield chunk\n\n                    await session.close()\n\n                filtered_headers = {}\n                for key, value in response.headers.items():\n                    if key.lower() in [\"content-encoding\", \"content-length\"]:\n                        logger.debug(f\"Filtering out header: {key}: {value}\")\n                        continue\n                    filtered_headers[key] = value\n\n                context.response = StreamingResponse(\n                    stream_response(),\n                    status_code=response.status_code,\n                    headers=filtered_headers,\n                )\n\n                # Stop pipeline on success\n                context.metadata[\"stop_pipeline\"] = True\n                logger.info(\"Successfully processed request via Claude API\")\n\n                # Store checkpoints in cache service after successful request\n                if checkpoints and account:\n                    cache_service.add_checkpoints(\n                        checkpoints, account.organization_uuid\n                    )\n\n        except (NoAccountsAvailableError, InvalidModelNameError):\n            logger.debug(\"No accounts available for Claude API, continuing pipeline\")\n\n        return context\n\n    def _insert_system_message(self, context: ClaudeAIContext) -> None:\n        \"\"\"Insert system message into the request.\"\"\"\n\n        request = context.messages_api_request\n\n        # Handle system field\n        system_message_text = (\n            \"You are Claude Code, Anthropic's official CLI for Claude.\"\n        )\n   
     system_message = TextContent(type=\"text\", text=system_message_text)\n\n        if isinstance(request.system, str) and request.system:\n            request.system = [\n                system_message,\n                TextContent(type=\"text\", text=request.system),\n            ]\n        elif isinstance(request.system, list) and request.system:\n            if request.system[0].text == system_message_text:\n                logger.debug(\"System message already exists, skipping injection.\")\n            else:\n                request.system = [system_message] + request.system\n        else:\n            request.system = [system_message]\n\n    def _prepare_headers(\n        self,\n        access_token: str,\n        request: MessagesAPIRequest,\n        original_request=None,\n    ) -> Dict[str, str]:\n        \"\"\"Prepare headers for Claude API request.\n\n        Beta headers: oauth 是 OAuth 认证必需的。\n        effort 和 structured-outputs 已 GA，不再需要 beta header。\n        客户端的 anthropic-beta header 会被透传（去重合并）。\n        \"\"\"\n        # oauth beta 是 OAuth 认证必需的\n        beta_features = [\"oauth-2025-04-20\"]\n\n        # 透传客户端 anthropic-beta header，与内部 beta 去重合并\n        if original_request:\n            client_beta = original_request.headers.get(\"anthropic-beta\", \"\")\n            if client_beta:\n                for beta in client_beta.split(\",\"):\n                    beta = beta.strip()\n                    if beta and beta not in beta_features:\n                        beta_features.append(beta)\n\n        return {\n            \"Authorization\": f\"Bearer {access_token}\",\n            \"anthropic-beta\": \",\".join(beta_features),\n            \"anthropic-version\": \"2023-06-01\",\n            \"Content-Type\": \"application/json\",\n        }\n"
  },
  {
    "path": "app/processors/claude_ai/claude_web_processor.py",
    "content": "import time\nimport base64\nimport random\nimport string\nfrom typing import List\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.services.session import session_manager\nfrom app.models.internal import ClaudeWebRequest, Attachment\nfrom app.core.exceptions import NoValidMessagesError\nfrom app.core.config import settings\nfrom app.utils.messages import process_messages\n\n\nclass ClaudeWebProcessor(BaseProcessor):\n    \"\"\"Claude AI processor that handles session management, request building, and sending to Claude AI.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Claude AI processor that:\n        1. Gets or creates a Claude session\n        2. Builds ClaudeWebRequest from messages_api_request\n        3. Sends the request to Claude.ai\n\n        Requires:\n            - messages_api_request in context\n\n        Produces:\n            - claude_session in context\n            - claude_web_request in context\n            - original_stream in context\n        \"\"\"\n        if context.original_stream:\n            logger.debug(\"Skipping ClaudeWebProcessor due to existing original_stream\")\n            return context\n\n        if not context.messages_api_request:\n            logger.warning(\n                \"Skipping ClaudeWebProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        # Step 1: Get or create Claude session\n        if not context.claude_session:\n            session_id = context.metadata.get(\"session_id\")\n            if not session_id:\n                session_id = f\"session_{int(time.time() * 1000)}\"\n                context.metadata[\"session_id\"] = session_id\n\n            logger.debug(f\"Creating new session: {session_id}\")\n            context.claude_session = await session_manager.get_or_create_session(\n                
session_id\n            )\n\n        # Step 2: Build ClaudeWebRequest\n        if not context.claude_web_request:\n            request = context.messages_api_request\n\n            if not request.messages:\n                raise NoValidMessagesError()\n\n            merged_text, images = await process_messages(\n                request.messages, request.system\n            )\n            if not merged_text:\n                raise NoValidMessagesError()\n\n            if settings.padtxt_length > 0:\n                pad_tokens = settings.pad_tokens or (\n                    string.ascii_letters + string.digits\n                )\n                pad_text = \"\".join(random.choices(pad_tokens, k=settings.padtxt_length))\n                merged_text = pad_text + merged_text\n                logger.debug(\n                    f\"Added {settings.padtxt_length} padding tokens to the beginning of the message\"\n                )\n\n            image_file_ids: List[str] = []\n            if images:\n                for i, image_source in enumerate(images):\n                    try:\n                        # Convert base64 to bytes\n                        image_data = base64.b64decode(image_source.data)\n\n                        # Upload to Claude\n                        file_id = await context.claude_session.upload_file(\n                            file_data=image_data,\n                            filename=f\"image_{i}.png\",  # Default filename\n                            content_type=image_source.media_type,\n                        )\n                        image_file_ids.append(file_id)\n                        logger.debug(f\"Uploaded image {i}: {file_id}\")\n                    except Exception as e:\n                        logger.error(f\"Failed to upload image {i}: {e}\")\n\n            await context.claude_session._ensure_conversation_initialized()\n\n            paprika_mode = (\n                \"extended\"\n                if (\n                    
context.claude_session.account.is_pro\n                    and request.thinking\n                    and request.thinking.type in (\"enabled\", \"adaptive\")\n                )\n                else None\n            )\n\n            await context.claude_session.set_paprika_mode(paprika_mode)\n\n            web_request = ClaudeWebRequest(\n                max_tokens_to_sample=request.max_tokens,\n                attachments=[Attachment.from_text(merged_text)],\n                files=image_file_ids,\n                model=request.model,\n                rendering_mode=\"messages\",\n                prompt=settings.custom_prompt or \"\",\n                timezone=\"UTC\",\n                tools=request.tools or [],\n            )\n\n            context.claude_web_request = web_request\n            logger.debug(f\"Built web request with {len(image_file_ids)} images\")\n\n        # Step 3: Send to Claude\n        logger.debug(\n            f\"Sending request to Claude.ai for session {context.claude_session.session_id}\"\n        )\n\n        request_dict = context.claude_web_request.model_dump(exclude_none=True)\n        context.original_stream = await context.claude_session.send_message(\n            request_dict\n        )\n\n        return context\n"
  },
  {
    "path": "app/processors/claude_ai/context.py",
    "content": "from dataclasses import dataclass\nfrom typing import Optional, AsyncIterator\n\nfrom app.core.claude_session import ClaudeWebSession\nfrom app.models.claude import Message, MessagesAPIRequest\nfrom app.models.internal import ClaudeWebRequest\nfrom app.models.streaming import StreamingEvent\nfrom app.processors.base import BaseContext\n\n\n@dataclass\nclass ClaudeAIContext(BaseContext):\n    messages_api_request: Optional[MessagesAPIRequest] = None\n    claude_web_request: Optional[ClaudeWebRequest] = None\n    claude_session: Optional[ClaudeWebSession] = None\n    original_stream: Optional[AsyncIterator[str]] = None\n    event_stream: Optional[AsyncIterator[StreamingEvent]] = None\n    collected_message: Optional[Message] = None\n"
  },
  {
    "path": "app/processors/claude_ai/event_parser_processor.py",
    "content": "from loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.services.event_processing.event_parser import EventParser\n\n\nclass EventParsingProcessor(BaseProcessor):\n    \"\"\"Processor that parses SSE streams into StreamingEvent objects.\"\"\"\n\n    def __init__(self):\n        super().__init__()\n        self.parser = EventParser()\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Parse the original_stream into event_stream.\n\n        Requires:\n            - original_stream in context\n\n        Produces:\n            - event_stream in context\n        \"\"\"\n        if context.event_stream:\n            logger.debug(\"Skipping EventParsingProcessor due to existing event_stream\")\n            return context\n\n        if not context.original_stream:\n            logger.warning(\n                \"Skipping EventParsingProcessor due to missing original_stream\"\n            )\n            return context\n\n        logger.debug(\"Starting event parsing from SSE stream\")\n        context.event_stream = self.parser.parse_stream(context.original_stream)\n\n        return context\n"
  },
  {
    "path": "app/processors/claude_ai/message_collector_processor.py",
    "content": "import json5\nfrom typing import AsyncIterator\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.streaming import (\n    Delta,\n    StreamingEvent,\n    MessageStartEvent,\n    ContentBlockStartEvent,\n    ContentBlockDeltaEvent,\n    ContentBlockStopEvent,\n    MessageDeltaEvent,\n    MessageStopEvent,\n    ErrorEvent,\n    ErrorInfo,\n    TextDelta,\n    InputJsonDelta,\n    ThinkingDelta,\n)\nfrom app.models.claude import (\n    ContentBlock,\n    ServerToolUseContent,\n    TextContent,\n    ThinkingContent,\n    ToolResultContent,\n    ToolUseContent,\n)\n\n\nclass MessageCollectorProcessor(BaseProcessor):\n    \"\"\"Processor that collects streaming events into a Message object without consuming the stream.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Collect streaming events into a Message object and update it in real-time.\n        This processor runs for both streaming and non-streaming requests.\n\n        Requires:\n            - event_stream in context\n\n        Produces:\n            - collected_message in context (updated in real-time)\n            - event_stream in context (wrapped to collect messages without consuming)\n        \"\"\"\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping MessageCollectorProcessor due to missing event_stream\"\n            )\n            return context\n\n        logger.debug(\"Setting up message collection from stream\")\n\n        original_stream = context.event_stream\n\n        new_stream = self._collect_messages_generator(original_stream, context)\n        context.event_stream = new_stream\n\n        return context\n\n    async def _collect_messages_generator(\n        self, event_stream: AsyncIterator[StreamingEvent], context: ClaudeAIContext\n    ) -> AsyncIterator[StreamingEvent]:\n        
\"\"\"\n        Generator that collects messages from the stream without consuming events.\n        Updates context.collected_message in real-time.\n        \"\"\"\n        context.collected_message = None\n\n        async for event in event_stream:\n            # Process the event to build/update the message\n            if isinstance(event.root, MessageStartEvent):\n                context.collected_message = event.root.message.model_copy(deep=True)\n                logger.debug(f\"Message started: {context.collected_message.id}\")\n\n            elif isinstance(event.root, ContentBlockStartEvent):\n                if context.collected_message:\n                    while len(context.collected_message.content) <= event.root.index:\n                        context.collected_message.content.append(None)\n                    context.collected_message.content[event.root.index] = (\n                        event.root.content_block.model_copy(deep=True)\n                    )\n                    logger.debug(\n                        f\"Content block {event.root.index} started: {event.root.content_block.type}\"\n                    )\n\n            elif isinstance(event.root, ContentBlockDeltaEvent):\n                if context.collected_message and event.root.index < len(\n                    context.collected_message.content\n                ):\n                    self._apply_delta(\n                        context.collected_message.content[event.root.index],\n                        event.root.delta,\n                    )\n\n            elif isinstance(event.root, ContentBlockStopEvent):\n                # Boundary checking to prevent IndexError caused by refusal responses\n                if (\n                    context.collected_message\n                    and event.root.index < len(context.collected_message.content)\n                ):\n                    block = context.collected_message.content[event.root.index]\n                    if isinstance(block, 
(ToolUseContent, ServerToolUseContent)):\n                        if hasattr(block, \"input_json\") and block.input_json:\n                            block.input = json5.loads(block.input_json)\n                            del block.input_json\n                    if isinstance(block, ToolResultContent):\n                        if hasattr(block, \"content_json\") and block.content_json:\n                            block = ToolResultContent(\n                                **block.model_dump(exclude={\"content\"}),\n                                content=json5.loads(block.content_json),\n                            )\n                            del block.content_json\n                            context.collected_message.content[event.root.index] = block\n                    logger.debug(f\"Content block {event.root.index} stopped\")\n                else:\n                    logger.debug(\n                        f\"Content block {event.root.index} stop skipped (no corresponding start)\"\n                    )\n\n            elif isinstance(event.root, MessageDeltaEvent):\n                if context.collected_message and event.root.delta:\n                    if event.root.delta.stop_reason:\n                        context.collected_message.stop_reason = (\n                            event.root.delta.stop_reason\n                        )\n                        # When refusal is detected and content is empty, yield ErrorEvent\n                        if (\n                            event.root.delta.stop_reason == \"refusal\"\n                            and not context.collected_message.content\n                        ):\n                            logger.warning(\"Request refused by Claude's safety filter\")\n                            error_event = StreamingEvent(\n                                root=ErrorEvent(\n                                    type=\"error\",\n                                    error=ErrorInfo(\n                             
           type=\"refusal\",\n                                        message=\"Chat paused: Claude's safety filters flagged this message. This occasionally happens with normal, safe messages. Try rephrasing or using a different model.\"\n                                    )\n                                )\n                            )\n                            yield error_event\n                    if event.root.delta.stop_sequence:\n                        context.collected_message.stop_sequence = (\n                            event.root.delta.stop_sequence\n                        )\n                if context.collected_message and event.root.usage:\n                    context.collected_message.usage = event.root.usage\n\n            elif isinstance(event.root, MessageStopEvent):\n                if context.collected_message:\n                    context.collected_message.content = [\n                        block\n                        for block in context.collected_message.content\n                        if block is not None\n                    ]\n                    logger.debug(\n                        f\"Message stopped with {len(context.collected_message.content)} content blocks\"\n                    )\n\n            elif isinstance(event.root, ErrorEvent):\n                logger.warning(f\"Error event received: {event.root.error.message}\")\n\n            # Yield the event without modification\n            yield event\n\n        if context.collected_message:\n            logger.debug(\n                f\"Collected message:\\n{context.collected_message.model_dump()}\"\n            )\n\n    def _apply_delta(self, content_block: ContentBlock, delta: Delta) -> None:\n        \"\"\"Apply a delta to a content block.\"\"\"\n        if isinstance(delta, TextDelta):\n            if isinstance(content_block, TextContent):\n                content_block.text += delta.text\n        elif isinstance(delta, ThinkingDelta):\n            if 
isinstance(content_block, ThinkingContent):\n                content_block.thinking += delta.thinking\n        elif isinstance(delta, InputJsonDelta):\n            if isinstance(content_block, (ToolUseContent, ServerToolUseContent)):\n                if hasattr(content_block, \"input_json\"):\n                    content_block.input_json += delta.partial_json\n                else:\n                    content_block.input_json = delta.partial_json\n            if isinstance(content_block, ToolResultContent):\n                if hasattr(content_block, \"content_json\"):\n                    content_block.content_json += delta.partial_json\n                else:\n                    content_block.content_json = delta.partial_json\n"
  },
  {
    "path": "app/processors/claude_ai/model_injector_processor.py",
    "content": "from typing import AsyncIterator\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.streaming import (\n    MessageStartEvent,\n    StreamingEvent,\n)\n\n\nclass ModelInjectorProcessor(BaseProcessor):\n    \"\"\"Processor that injects model information when it's missing from MessageStartEvent.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Intercept MessageStartEvent and add model information if missing.\n\n        Requires:\n            - event_stream in context\n            - messages_api_request in context (for model information)\n\n        Produces:\n            - event_stream with updated MessageStartEvent containing model\n        \"\"\"\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping ModelInjectorProcessor due to missing event_stream\"\n            )\n            return context\n\n        if not context.messages_api_request:\n            logger.warning(\n                \"Skipping ModelInjectorProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        logger.debug(\"Setting up model injection for stream\")\n\n        original_stream = context.event_stream\n        new_stream = self._inject_model_generator(original_stream, context)\n        context.event_stream = new_stream\n\n        return context\n\n    async def _inject_model_generator(\n        self,\n        event_stream: AsyncIterator[StreamingEvent],\n        context: ClaudeAIContext,\n    ) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Generator that adds model to MessageStartEvent if missing.\n        \"\"\"\n        # Get model from request\n        model = context.messages_api_request.model\n\n        async for event in event_stream:\n            if isinstance(event.root, MessageStartEvent):\n                # Check if model 
is missing or empty\n                if not event.root.message.model:\n                    event.root.message.model = model\n                    logger.debug(f\"Injected model '{model}' into MessageStartEvent\")\n                else:\n                    logger.debug(\n                        f\"MessageStartEvent already has model: '{event.root.message.model}'\"\n                    )\n\n            yield event\n"
  },
  {
    "path": "app/processors/claude_ai/non_streaming_response_processor.py",
    "content": "from loguru import logger\nfrom fastapi.responses import JSONResponse\n\nfrom app.core.exceptions import ClaudeStreamingError, NoMessageError\nfrom app.models.streaming import ErrorEvent\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\n\n\nclass NonStreamingResponseProcessor(BaseProcessor):\n    \"\"\"Processor that builds a non-streaming JSON response from collected message.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Build a non-streaming JSON response from the collected message.\n        This processor only runs for non-streaming requests.\n\n        Requires:\n            - messages_api_request with stream=False\n            - collected_message in context (must consume entire stream first)\n\n        Produces:\n            - response (JSONResponse) in context\n        \"\"\"\n        if context.response:\n            logger.debug(\n                \"Skipping NonStreamingResponseProcessor due to existing response\"\n            )\n            return context\n\n        if context.messages_api_request and context.messages_api_request.stream is True:\n            logger.debug(\"Skipping NonStreamingResponseProcessor for streaming request\")\n            return context\n\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping NonStreamingResponseProcessor due to missing event_stream\"\n            )\n            return context\n\n        logger.info(\"Building non-streaming response\")\n\n        # Consume the entire stream to ensure collected_message is complete\n        async for event in context.event_stream:\n            if isinstance(event.root, ErrorEvent):\n                raise ClaudeStreamingError(\n                    error_type=event.root.error.type,\n                    error_message=event.root.error.message,\n                )\n\n        if not context.collected_message:\n            
logger.error(\"No message collected after consuming stream\")\n            raise NoMessageError()\n\n        context.response = JSONResponse(\n            content=context.collected_message.model_dump(exclude_none=True),\n            headers={\n                \"Content-Type\": \"application/json\",\n                \"Cache-Control\": \"no-cache\",\n            },\n        )\n\n        return context\n"
  },
  {
    "path": "app/processors/claude_ai/pipeline.py",
    "content": "from typing import List, Optional\nfrom loguru import logger\n\nfrom app.services.session import session_manager\nfrom app.processors.pipeline import ProcessingPipeline\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.processors.claude_ai.tavern_test_message_processor import TestMessageProcessor\nfrom app.processors.claude_ai.claude_web_processor import ClaudeWebProcessor\nfrom app.processors.claude_ai.claude_api_processor import ClaudeAPIProcessor\nfrom app.processors.claude_ai.event_parser_processor import EventParsingProcessor\nfrom app.processors.claude_ai.streaming_response_processor import (\n    StreamingResponseProcessor,\n)\nfrom app.processors.claude_ai.message_collector_processor import (\n    MessageCollectorProcessor,\n)\nfrom app.processors.claude_ai.non_streaming_response_processor import (\n    NonStreamingResponseProcessor,\n)\nfrom app.processors.claude_ai.token_counter_processor import TokenCounterProcessor\nfrom app.processors.claude_ai.tool_result_processor import ToolResultProcessor\nfrom app.processors.claude_ai.tool_call_event_processor import ToolCallEventProcessor\nfrom app.processors.claude_ai.stop_sequences_processor import StopSequencesProcessor\nfrom app.processors.claude_ai.model_injector_processor import ModelInjectorProcessor\n\n\nclass ClaudeAIPipeline(ProcessingPipeline):\n    def __init__(self, processors: Optional[List[BaseProcessor]] = None):\n        \"\"\"\n        Initialize the pipeline with processors.\n\n        Args:\n            processors: List of processors to use. 
If None, default processors are used.\n        \"\"\"\n        processors = (\n            [\n                TestMessageProcessor(),\n                ToolResultProcessor(),\n                ClaudeAPIProcessor(),\n                ClaudeWebProcessor(),\n                EventParsingProcessor(),\n                ModelInjectorProcessor(),\n                StopSequencesProcessor(),\n                ToolCallEventProcessor(),\n                MessageCollectorProcessor(),\n                TokenCounterProcessor(),\n                StreamingResponseProcessor(),\n                NonStreamingResponseProcessor(),\n            ]\n            if processors is None\n            else processors\n        )\n\n        super().__init__(processors)\n\n    async def process(\n        self,\n        context: ClaudeAIContext,\n    ) -> ClaudeAIContext:\n        \"\"\"\n        Process a Claude API request through the pipeline.\n\n        Args:\n            context: The processing context\n\n        Returns:\n            Updated context.\n\n        Raises:\n            Exception: If any processor fails or no response is generated\n        \"\"\"\n        try:\n            return await super().process(context)\n        except Exception as e:\n            if context.claude_session:\n                await session_manager.remove_session(context.claude_session.session_id)\n            logger.error(f\"Pipeline processing failed: {e}\")\n            raise e\n"
  },
  {
    "path": "app/processors/claude_ai/stop_sequences_processor.py",
    "content": "from typing import AsyncIterator, List\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.streaming import (\n    StreamingEvent,\n    ContentBlockDeltaEvent,\n    ContentBlockStopEvent,\n    MessageDeltaEvent,\n    MessageStopEvent,\n    MessageDeltaData,\n    TextDelta,\n)\nfrom app.services.session import session_manager\n\n\nclass StopSequencesProcessor(BaseProcessor):\n    \"\"\"Processor that handles stop sequences in streaming responses.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Process streaming events to detect and handle stop sequences.\n\n        Requires:\n            - event_stream in context\n            - messages_api_request in context (for stop_sequences)\n\n        Produces:\n            - Modified event_stream that stops when a stop sequence is detected\n            - Injects MessageDelta and MessageStop events when stop sequence found\n        \"\"\"\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping StopSequencesProcessor due to missing event_stream\"\n            )\n            return context\n\n        if not context.messages_api_request:\n            logger.warning(\n                \"Skipping StopSequencesProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        stop_sequences = context.messages_api_request.stop_sequences\n        if not stop_sequences:\n            logger.debug(\"No stop sequences configured, skipping processor\")\n            return context\n\n        logger.debug(f\"Setting up stop sequences processing for: {stop_sequences}\")\n\n        original_stream = context.event_stream\n        new_stream = self._process_stop_sequences(\n            original_stream, stop_sequences, context\n        )\n        context.event_stream = new_stream\n\n        return 
context\n\n    async def _process_stop_sequences(\n        self,\n        event_stream: AsyncIterator[StreamingEvent],\n        stop_sequences: List[str],\n        context: ClaudeAIContext,\n    ) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Process events and stop when a stop sequence is detected.\n        Uses incremental matching with buffering.\n        \"\"\"\n        stop_sequences_set = set(stop_sequences)\n\n        buffer = \"\"\n        current_index = 0\n\n        # Track potential matches: (start_position, current_matched_text)\n        potential_matches = []\n\n        async for event in event_stream:\n            if isinstance(event.root, ContentBlockDeltaEvent) and isinstance(\n                event.root.delta, TextDelta\n            ):\n                text = event.root.delta.text\n                current_index = event.root.index\n\n                for char in text:\n                    buffer += char\n                    current_pos = len(buffer) - 1\n\n                    potential_matches.append((current_pos, \"\"))\n\n                    new_matches = []\n                    for start_pos, matched_text in potential_matches:\n                        extended_match = matched_text + char\n\n                        could_match = False\n                        for stop_seq in stop_sequences:\n                            if stop_seq.startswith(extended_match):\n                                could_match = True\n                                break\n\n                        if could_match:\n                            new_matches.append((start_pos, extended_match))\n\n                            if extended_match in stop_sequences_set:\n                                logger.debug(\n                                    f\"Stop sequence detected: '{extended_match}'\"\n                                )\n\n                                safe_text = buffer[:start_pos]\n\n                                if safe_text:\n                       
             yield StreamingEvent(\n                                        root=ContentBlockDeltaEvent(\n                                            type=\"content_block_delta\",\n                                            index=current_index,\n                                            delta=TextDelta(\n                                                type=\"text_delta\", text=safe_text\n                                            ),\n                                        )\n                                    )\n\n                                yield StreamingEvent(\n                                    root=ContentBlockStopEvent(\n                                        type=\"content_block_stop\", index=current_index\n                                    )\n                                )\n\n                                yield StreamingEvent(\n                                    root=MessageDeltaEvent(\n                                        type=\"message_delta\",\n                                        delta=MessageDeltaData(\n                                            stop_reason=\"stop_sequence\",\n                                            stop_sequence=extended_match,\n                                        ),\n                                        usage=None,\n                                    )\n                                )\n\n                                yield StreamingEvent(\n                                    root=MessageStopEvent(type=\"message_stop\")\n                                )\n\n                                if context.claude_session:\n                                    await session_manager.remove_session(\n                                        context.claude_session.session_id\n                                    )\n\n                                return\n\n                    potential_matches = new_matches\n\n                    if potential_matches:\n                        earliest_start = min(\n      
                      start_pos for start_pos, _ in potential_matches\n                        )\n                        safe_length = earliest_start\n                    else:\n                        safe_length = len(buffer)\n\n                    if safe_length > 0:\n                        safe_text = buffer[:safe_length]\n                        yield StreamingEvent(\n                            root=ContentBlockDeltaEvent(\n                                type=\"content_block_delta\",\n                                index=current_index,\n                                delta=TextDelta(type=\"text_delta\", text=safe_text),\n                            )\n                        )\n\n                        buffer = buffer[safe_length:]\n                        new_matches = []\n                        for start_pos, matched_text in potential_matches:\n                            new_start = start_pos - safe_length\n                            if new_start >= 0:\n                                new_matches.append((new_start, matched_text))\n                        potential_matches = new_matches\n\n            else:\n                # Non-text event - flush buffer and reset\n                if buffer:\n                    yield StreamingEvent(\n                        root=ContentBlockDeltaEvent(\n                            type=\"content_block_delta\",\n                            index=current_index,\n                            delta=TextDelta(type=\"text_delta\", text=buffer),\n                        )\n                    )\n                    buffer = \"\"\n                    potential_matches = []\n\n                yield event\n"
  },
  {
    "path": "app/processors/claude_ai/streaming_response_processor.py",
    "content": "from loguru import logger\n\nfrom fastapi.responses import StreamingResponse\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.services.event_processing.event_serializer import EventSerializer\n\n\nclass StreamingResponseProcessor(BaseProcessor):\n    \"\"\"Processor that serializes event streams and creates a StreamingResponse.\"\"\"\n\n    def __init__(self):\n        super().__init__()\n        self.serializer = EventSerializer()\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Serialize the event_stream and create a StreamingResponse.\n\n        Requires:\n            - event_stream in context\n\n        Produces:\n            - response in context\n\n        This processor typically marks the end of the pipeline by returning STOP action.\n        \"\"\"\n        if context.response:\n            logger.debug(\"Skipping StreamingResponseProcessor due to existing response\")\n            return context\n\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping StreamingResponseProcessor due to missing event_stream\"\n            )\n            return context\n\n        if (\n            not context.messages_api_request\n            or context.messages_api_request.stream is not True\n        ):\n            logger.debug(\n                \"Skipping StreamingResponseProcessor due to non-streaming request\"\n            )\n            return context\n\n        logger.info(\"Creating streaming response from event stream\")\n\n        sse_stream = self.serializer.serialize_stream(context.event_stream)\n\n        context.response = StreamingResponse(\n            sse_stream,\n            media_type=\"text/event-stream\",\n            headers={\n                \"Cache-Control\": \"no-cache\",\n                \"Connection\": \"keep-alive\",\n                \"X-Accel-Buffering\": \"no\",  # Disable 
nginx buffering\n            },\n        )\n\n        return context\n"
  },
  {
    "path": "app/processors/claude_ai/tavern_test_message_processor.py",
    "content": "from loguru import logger\nimport uuid\n\nfrom fastapi.responses import JSONResponse\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.claude import (\n    Message,\n    Role,\n    TextContent,\n    Usage,\n)\n\n\nclass TestMessageProcessor(BaseProcessor):\n    \"\"\"Processor that handles test messages.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Check if this is a test message and respond immediately if so.\n\n        Test message criteria:\n        - Only one message in messages array\n        - Message role is \"user\"\n        - Message content is \"Hi\"\n        - stream is False\n\n        If it's a test message, creates a MessagesAPIResponse and stops the pipeline.\n        \"\"\"\n        if not context.messages_api_request:\n            return context\n\n        request = context.messages_api_request\n\n        if (\n            len(request.messages) == 1\n            and request.messages[0].role == Role.USER\n            and request.stream is False\n            and (\n                (\n                    isinstance(request.messages[0].content, str)\n                    and request.messages[0].content == \"Hi\"\n                )\n                or (\n                    isinstance(request.messages[0].content, list)\n                    and len(request.messages[0].content) == 1\n                    and isinstance(request.messages[0].content[0], TextContent)\n                    and request.messages[0].content[0].text == \"Hi\"\n                )\n            )\n        ):\n            logger.debug(\"Test message detected, returning canned response\")\n\n            response = Message(\n                id=f\"msg_{uuid.uuid4().hex[:10]}\",\n                type=\"message\",\n                role=\"assistant\",\n                content=[\n                    TextContent(type=\"text\", text=\"Hello! 
How can I assist you today?\")\n                ],\n                model=request.model,\n                stop_reason=\"end_turn\",\n                stop_sequence=None,\n                usage=Usage(input_tokens=1, output_tokens=9),\n            )\n\n            context.response = JSONResponse(\n                content=response.model_dump(), status_code=200\n            )\n\n            context.metadata[\"stop_pipeline\"] = True\n            return context\n\n        return context\n"
  },
  {
    "path": "app/processors/claude_ai/token_counter_processor.py",
    "content": "from typing import AsyncIterator\nfrom loguru import logger\nimport tiktoken\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.streaming import (\n    MessageStartEvent,\n    StreamingEvent,\n    MessageDeltaEvent,\n)\nfrom app.models.claude import Usage\nfrom app.utils.messages import process_messages\n\nencoder = tiktoken.get_encoding(\"cl100k_base\")\n\n\nclass TokenCounterProcessor(BaseProcessor):\n    \"\"\"Processor that estimates token usage when it's not provided by the API.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Intercept MessageDeltaEvent and add token usage estimation if missing.\n\n        Requires:\n            - event_stream in context\n            - messages_api_request in context (for input token counting)\n            - collected_message in context (for output token counting)\n\n        Produces:\n            - event_stream with updated MessageDeltaEvent containing usage\n        \"\"\"\n        if not context.event_stream:\n            logger.warning(\"Skipping TokenCounterProcessor due to missing event_stream\")\n            return context\n\n        if not context.messages_api_request:\n            logger.warning(\n                \"Skipping TokenCounterProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        logger.debug(\"Setting up token counting for stream\")\n\n        original_stream = context.event_stream\n        new_stream = self._count_tokens_generator(original_stream, context)\n        context.event_stream = new_stream\n\n        return context\n\n    async def _count_tokens_generator(\n        self,\n        event_stream: AsyncIterator[StreamingEvent],\n        context: ClaudeAIContext,\n    ) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Generator that adds token usage to MessageDeltaEvent if missing.\n        \"\"\"\n      
  # Pre-calculate input tokens once\n        input_tokens = await self._calculate_input_tokens(context)\n\n        async for event in event_stream:\n            if (\n                isinstance(event.root, MessageStartEvent)\n                and not event.root.message.usage\n            ):\n                usage = Usage(\n                    input_tokens=input_tokens,\n                    output_tokens=1,\n                    cache_creation_input_tokens=0,\n                    cache_read_input_tokens=0,\n                )\n\n                event.root.message.usage = usage\n                context.collected_message.usage = usage\n\n                logger.debug(f\"Added token usage estimation: input={input_tokens}\")\n\n            if isinstance(event.root, MessageDeltaEvent) and not event.root.usage:\n                output_tokens = await self._calculate_output_tokens(context)\n\n                usage = Usage(\n                    input_tokens=input_tokens,\n                    output_tokens=output_tokens,\n                    cache_creation_input_tokens=0,\n                    cache_read_input_tokens=0,\n                )\n\n                event.root.usage = usage\n                context.collected_message.usage = usage\n\n                logger.debug(\n                    f\"Added token usage estimation: input={input_tokens}, output={output_tokens}\"\n                )\n\n            yield event\n\n    async def _calculate_input_tokens(self, context: ClaudeAIContext) -> int:\n        \"\"\"Calculate input tokens from the request messages.\"\"\"\n        if not context.messages_api_request:\n            return 0\n\n        merged_text, _ = await process_messages(\n            context.messages_api_request.messages, context.messages_api_request.system\n        )\n\n        try:\n            tokens = len(encoder.encode(merged_text, disallowed_special=()))\n        except Exception:\n            logger.warning(\"Tiktoken encoding failed for input, falling back to 
estimation\")\n            tokens = len(merged_text) // 4\n\n        logger.debug(f\"Calculated {tokens} input tokens\")\n        return tokens\n\n    async def _calculate_output_tokens(self, context: ClaudeAIContext) -> int:\n        \"\"\"Calculate output tokens from the collected message.\"\"\"\n        if not context.collected_message:\n            return 0\n\n        merged_text, _ = await process_messages([context.collected_message])\n\n        try:\n            tokens = len(encoder.encode(merged_text, disallowed_special=()))\n        except Exception:\n            logger.warning(\"Tiktoken encoding failed for output, falling back to estimation\")\n            tokens = len(merged_text) // 4\n\n        logger.debug(f\"Calculated {tokens} output tokens\")\n        return tokens\n"
  },
  {
    "path": "app/processors/claude_ai/tool_call_event_processor.py",
    "content": "from typing import AsyncIterator, Optional\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.streaming import (\n    StreamingEvent,\n    ContentBlockStartEvent,\n    ContentBlockStopEvent,\n    MessageDeltaEvent,\n    MessageStopEvent,\n    MessageDeltaData,\n)\nfrom app.models.claude import ToolResultContent, ToolUseContent\nfrom app.services.tool_call import tool_call_manager\n\n\nclass ToolCallEventProcessor(BaseProcessor):\n    \"\"\"Processor that handles tool use events in the streaming response.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Intercept tool use content blocks and inject MessageDelta/MessageStop events.\n\n        Requires:\n            - event_stream in context\n            - cladue_session in context\n\n        Produces:\n            - Modified event_stream with injected events for tool calls\n            - Pauses session when tool call is detected\n        \"\"\"\n        if not context.event_stream:\n            logger.warning(\n                \"Skipping ToolCallEventProcessor due to missing event_stream\"\n            )\n            return context\n\n        if not context.claude_session:\n            logger.warning(\"Skipping ToolCallEventProcessor due to missing session\")\n            return context\n\n        logger.debug(\"Setting up tool call event processing\")\n\n        original_stream = context.event_stream\n        new_stream = self._process_tool_events(original_stream, context)\n        context.event_stream = new_stream\n\n        return context\n\n    async def _process_tool_events(\n        self,\n        event_stream: AsyncIterator[StreamingEvent],\n        context: ClaudeAIContext,\n    ) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Process events and inject MessageDelta/MessageStop when tool use is detected.\n        \"\"\"\n        
current_tool_use_id: Optional[str] = None\n        tool_use_detected = False\n        content_block_index: Optional[int] = None\n        tool_result_detected = False\n\n        async for event in event_stream:\n            # Check for ContentBlockStartEvent with tool_use type\n            if isinstance(event.root, ContentBlockStartEvent):\n                if isinstance(event.root.content_block, ToolUseContent):\n                    current_tool_use_id = event.root.content_block.id\n                    content_block_index = event.root.index\n                    tool_use_detected = True\n                    logger.debug(\n                        f\"Detected tool use start: {current_tool_use_id} \"\n                        f\"(name: {event.root.content_block.name})\"\n                    )\n                elif isinstance(event.root.content_block, ToolResultContent):\n                    logger.debug(\n                        f\"Detected tool result: {event.root.content_block.tool_use_id}\"\n                    )\n                    tool_result_detected = True\n\n            # Yield the original event\n            if tool_result_detected:\n                logger.debug(\"Skipping tool result content block\")\n            else:\n                yield event\n\n            # Check for ContentBlockStopEvent for a tool use block\n            if isinstance(event.root, ContentBlockStopEvent):\n                if tool_result_detected:\n                    logger.debug(\"Tool result block ended\")\n                    tool_result_detected = False\n                if (\n                    tool_use_detected\n                    and content_block_index is not None\n                    and event.root.index == content_block_index\n                ):\n                    logger.debug(f\"Tool use block ended: {current_tool_use_id}\")\n\n                    message_delta = MessageDeltaEvent(\n                        type=\"message_delta\",\n                        
delta=MessageDeltaData(stop_reason=\"tool_use\"),\n                        usage=None,\n                    )\n                    yield StreamingEvent(root=message_delta)\n\n                    message_stop = MessageStopEvent(type=\"message_stop\")\n                    yield StreamingEvent(root=message_stop)\n\n                    # Register the tool call\n                    if current_tool_use_id and context.claude_session:\n                        tool_call_manager.register_tool_call(\n                            tool_use_id=current_tool_use_id,\n                            session_id=context.claude_session.session_id,\n                            message_id=context.collected_message.id\n                            if context.collected_message\n                            else None,\n                        )\n\n                        logger.info(\n                            f\"Registered tool call {current_tool_use_id} for session {context.claude_session.session_id}\"\n                        )\n\n                    current_tool_use_id = None\n                    tool_use_detected = False\n                    content_block_index = None\n\n                    break\n"
  },
  {
    "path": "app/processors/claude_ai/tool_result_processor.py",
    "content": "import uuid\nfrom loguru import logger\n\nfrom app.processors.base import BaseProcessor\nfrom app.processors.claude_ai import ClaudeAIContext\nfrom app.models.claude import TextContent, ToolResultContent\nfrom app.models.streaming import MessageStartEvent, StreamingEvent\nfrom app.services.tool_call import tool_call_manager\nfrom app.services.session import session_manager\nfrom app.services.event_processing import EventSerializer\n\nevent_serializer = EventSerializer()\n\n\nclass ToolResultProcessor(BaseProcessor):\n    \"\"\"Processor that handles tool result messages and resumes paused sessions.\"\"\"\n\n    async def process(self, context: ClaudeAIContext) -> ClaudeAIContext:\n        \"\"\"\n        Check if the last message is a tool result and handle accordingly.\n\n        Requires:\n            - messages_api_request in context\n\n        Produces:\n            - Resumes paused session if tool result matches\n            - Sets event_stream from resumed session\n            - Skips normal request building/sending\n        \"\"\"\n        if not context.messages_api_request:\n            logger.warning(\n                \"Skipping ToolResultProcessor due to missing messages_api_request\"\n            )\n            return context\n\n        messages = context.messages_api_request.messages\n        if not messages:\n            return context\n\n        last_message = messages[-1]\n\n        if last_message.role != \"user\":\n            return context\n\n        if isinstance(last_message.content, str):\n            return context\n\n        # Find tool result content block\n        lsat_content_block = last_message.content[-1]\n        if not isinstance(lsat_content_block, ToolResultContent):\n            return context\n\n        tool_result = lsat_content_block\n\n        logger.debug(f\"Found tool result for tool_use_id: {tool_result.tool_use_id}\")\n\n        # Check if we have a pending tool call for this ID\n        tool_call_state = 
tool_call_manager.get_tool_call(tool_result.tool_use_id)\n        if not tool_call_state:\n            logger.debug(\n                f\"No pending tool call found for tool_use_id: {tool_result.tool_use_id}\"\n            )\n            return context\n\n        # Get the session\n        session = await session_manager.get_session(tool_call_state.session_id)\n        if not session:\n            logger.error(\n                f\"Session {tool_call_state.session_id} not found for tool call {tool_result.tool_use_id}\"\n            )\n            tool_call_manager.complete_tool_call(tool_result.tool_use_id)\n            return context\n\n        if isinstance(tool_result.content, str):\n            tool_result.content = [TextContent(type=\"text\", text=tool_result.content)]\n        tool_result_payload = tool_result.model_dump()\n\n        await session.send_tool_result(tool_result_payload)\n        logger.info(\n            f\"Sent tool result for {tool_result.tool_use_id} to session {session.session_id}\"\n        )\n\n        if not session.sse_stream:\n            logger.error(f\"No stream available for session {session.session_id}\")\n            tool_call_manager.complete_tool_call(tool_result.tool_use_id)\n            return context\n\n        # Continue with the existing stream\n        resumed_stream = session.sse_stream\n\n        message_start_event = MessageStartEvent(\n            type=\"message_start\",\n            message=context.collected_message\n            if context.collected_message\n            else {\n                \"id\": tool_call_state.message_id or str(uuid.uuid4()),\n                \"type\": \"message\",\n                \"role\": \"assistant\",\n                \"content\": [],\n                \"model\": context.messages_api_request.model,\n            },\n        )\n\n        # Create a generator that yields the message start event followed by the resumed stream\n        async def resumed_event_stream():\n            yield 
event_serializer.serialize_event(\n                StreamingEvent(root=message_start_event)\n            )\n            async for event in resumed_stream:\n                yield event\n\n        context.original_stream = resumed_event_stream()\n        context.claude_session = session\n\n        tool_call_manager.complete_tool_call(tool_result.tool_use_id)\n\n        # Skip the normal Claude AI processor\n        context.metadata[\"skip_processors\"] = [\n            \"ClaudeAPIProcessor\",\n            \"ClaudeWebProcessor\",\n        ]\n\n        return context\n"
  },
  {
    "path": "app/processors/pipeline.py",
    "content": "from typing import List, Optional\nfrom loguru import logger\n\nfrom app.processors.base import BaseContext, BaseProcessor\n\n\nclass ProcessingPipeline(BaseProcessor):\n    \"\"\"\n    Main pipeline for processing Claude requests.\n    \"\"\"\n\n    def __init__(self, processors: Optional[List[BaseProcessor]] = None):\n        \"\"\"\n        Initialize the pipeline with processors.\n\n        Args:\n            processors: List of processors to use. If None, default processors are used.\n        \"\"\"\n        self.processors = processors\n\n        logger.debug(f\"Initialized pipeline with {len(self.processors)} processors\")\n        for processor in self.processors:\n            logger.debug(f\"  - {processor.name}\")\n\n    async def process(self, context: BaseContext) -> BaseContext:\n        \"\"\"\n        Process a request through the pipeline.\n\n        Args:\n            context: The processing context\n\n        Returns:\n            Updated context.\n        \"\"\"\n\n        logger.debug(\"Starting pipeline processing\")\n\n        # Process through each processor\n        for i, processor in enumerate(self.processors):\n            if processor.name in context.metadata.get(\"skip_processors\", []):\n                logger.debug(\n                    f\"Skipping processor {processor.name} due to being in skip_processors list\"\n                )\n                continue\n\n            logger.debug(\n                f\"Running processor {i + 1}/{len(self.processors)}: {processor.name}\"\n            )\n\n            context = await processor.process(context)\n\n            if context.metadata.get(\"stop_pipeline\", False):\n                logger.debug(f\"Pipeline stopped by {processor.name}\")\n                break\n\n        logger.debug(\"Pipeline processing completed successfully\")\n        return context\n"
  },
  {
    "path": "app/services/__init__.py",
    "content": ""
  },
  {
    "path": "app/services/account.py",
    "content": "import asyncio\nfrom datetime import datetime, UTC\nfrom typing import List, Optional, Dict, Set\n\nfrom collections import defaultdict\nfrom loguru import logger\nimport threading\nimport json\nimport uuid\n\nfrom app.core.config import settings\nfrom app.core.exceptions import NoAccountsAvailableError\nfrom app.core.account import Account, AccountStatus, AuthType, OAuthToken\nfrom app.services.oauth import oauth_authenticator\n\n\nclass AccountManager:\n    \"\"\"\n    Singleton manager for Claude.ai accounts with load balancing and rate limit recovery.\n    Supports both cookie and OAuth authentication.\n    \"\"\"\n\n    _instance: Optional[\"AccountManager\"] = None\n    _lock = threading.Lock()\n\n    def __new__(cls):\n        \"\"\"Implement singleton pattern.\"\"\"\n        if cls._instance is None:\n            with cls._lock:\n                if cls._instance is None:\n                    cls._instance = super().__new__(cls)\n        return cls._instance\n\n    def __init__(self):\n        \"\"\"Initialize the AccountManager.\"\"\"\n        self._accounts: Dict[str, Account] = {}  # organization_uuid -> Account\n        self._cookie_to_uuid: Dict[str, str] = {}  # cookie_value -> organization_uuid\n        self._session_accounts: Dict[str, str] = {}  # session_id -> organization_uuid\n        self._account_sessions: Dict[str, Set[str]] = defaultdict(\n            set\n        )  # organization_uuid -> set of session_ids\n        self._account_task: Optional[asyncio.Task] = None\n        self._max_sessions_per_account = settings.max_sessions_per_cookie\n        self._account_task_interval = settings.account_task_interval\n\n        logger.info(\"AccountManager initialized\")\n\n    async def add_account(\n        self,\n        cookie_value: Optional[str] = None,\n        oauth_token: Optional[OAuthToken] = None,\n        organization_uuid: Optional[str] = None,\n        capabilities: Optional[List[str]] = None,\n    ) -> Account:\n        
\"\"\"Add a new account to the manager.\n\n        Args:\n            cookie_value: The cookie value (optional)\n            oauth_token: The OAuth token (optional)\n            organization_uuid: The organization UUID (optional, will be fetched or generated if not provided)\n            capabilities: The account capabilities (optional)\n\n        Raises:\n            ValueError: If neither cookie_value nor oauth_token is provided\n        \"\"\"\n        if not cookie_value and not oauth_token:\n            raise ValueError(\"Either cookie_value or oauth_token must be provided\")\n\n        if cookie_value and cookie_value in self._cookie_to_uuid:\n            return self._accounts[self._cookie_to_uuid[cookie_value]]\n\n        if cookie_value and (not organization_uuid or not capabilities):\n            (\n                fetched_uuid,\n                capabilities,\n            ) = await oauth_authenticator.get_organization_info(cookie_value)\n            if fetched_uuid:\n                organization_uuid = fetched_uuid\n\n        if organization_uuid and organization_uuid in self._accounts:\n            existing_account = self._accounts[organization_uuid]\n\n            if cookie_value and existing_account.cookie_value != cookie_value:\n                if existing_account.cookie_value:\n                    del self._cookie_to_uuid[existing_account.cookie_value]\n                existing_account.cookie_value = cookie_value\n                self._cookie_to_uuid[cookie_value] = organization_uuid\n            return existing_account\n\n        if not organization_uuid:\n            organization_uuid = str(uuid.uuid4())\n            logger.info(f\"Generated new organization UUID: {organization_uuid}\")\n\n        # Create new account\n        if cookie_value and oauth_token:\n            auth_type = AuthType.BOTH\n        elif cookie_value:\n            auth_type = AuthType.COOKIE_ONLY\n        else:\n            auth_type = AuthType.OAUTH_ONLY\n\n        account = 
Account(\n            organization_uuid=organization_uuid,\n            capabilities=capabilities,\n            cookie_value=cookie_value,\n            oauth_token=oauth_token,\n            auth_type=auth_type,\n        )\n        self._accounts[organization_uuid] = account\n        self.save_accounts()\n\n        if cookie_value:\n            self._cookie_to_uuid[cookie_value] = organization_uuid\n\n        logger.info(\n            f\"Added new account: {organization_uuid[:8]}... \"\n            f\"(auth_type: {auth_type.value}, \"\n            f\"cookie: {cookie_value[:20] + '...' if cookie_value else 'None'}, \"\n            f\"oauth: {'Yes' if oauth_token else 'No'})\"\n        )\n\n        if auth_type == AuthType.COOKIE_ONLY:\n            asyncio.create_task(self._attempt_oauth_authentication(account))\n\n        return account\n\n    async def remove_account(self, organization_uuid: str) -> None:\n        \"\"\"Remove an account from the manager.\"\"\"\n        if organization_uuid in self._accounts:\n            account = self._accounts[organization_uuid]\n            sessions_to_remove = list(\n                self._account_sessions.get(organization_uuid, set())\n            )\n\n            for session_id in sessions_to_remove:\n                if session_id in self._session_accounts:\n                    del self._session_accounts[session_id]\n\n            if account.cookie_value and account.cookie_value in self._cookie_to_uuid:\n                del self._cookie_to_uuid[account.cookie_value]\n\n            del self._accounts[organization_uuid]\n\n            if organization_uuid in self._account_sessions:\n                del self._account_sessions[organization_uuid]\n\n            logger.info(f\"Removed account: {organization_uuid[:8]}...\")\n            self.save_accounts()\n\n    async def get_account_for_session(\n        self,\n        session_id: str,\n        is_pro: Optional[bool] = None,\n        is_max: Optional[bool] = None,\n    ) -> 
Account:\n        \"\"\"\n        Get an available account for the session with load balancing.\n\n        Args:\n            session_id: Unique identifier for the session\n            is_pro: Filter by pro capability. None means any.\n            is_max: Filter by max capability. None means any.\n\n        Returns:\n            Account instance if available\n        \"\"\"\n        # Convert single auth_type to list for uniform handling\n        if session_id in self._session_accounts:\n            organization_uuid = self._session_accounts[session_id]\n            if organization_uuid in self._accounts:\n                account = self._accounts[organization_uuid]\n                if account.status == AccountStatus.VALID:\n                    return account\n                else:\n                    del self._session_accounts[session_id]\n                    self._account_sessions[organization_uuid].discard(session_id)\n\n        best_account = None\n        min_sessions = float(\"inf\")\n        earliest_last_used = None\n\n        for organization_uuid, account in self._accounts.items():\n            if account.status != AccountStatus.VALID:\n                continue\n\n            # Filter by auth type if specified\n            if account.auth_type not in [AuthType.BOTH, AuthType.COOKIE_ONLY]:\n                continue\n\n            # Filter by capabilities if specified\n            if is_pro is not None and account.is_pro != is_pro:\n                continue\n            if is_max is not None and account.is_max != is_max:\n                continue\n\n            session_count = len(self._account_sessions[organization_uuid])\n            if session_count >= self._max_sessions_per_account:\n                continue\n\n            # Select account with least sessions\n            # If multiple accounts have the same least sessions, select the one with earliest last_used\n            if session_count < min_sessions or (\n                session_count == 
min_sessions\n                and (\n                    earliest_last_used is not None\n                    and account.last_used < earliest_last_used\n                )\n            ):\n                min_sessions = session_count\n                earliest_last_used = account.last_used\n                best_account = account\n\n        if best_account:\n            self._session_accounts[session_id] = best_account.organization_uuid\n            self._account_sessions[best_account.organization_uuid].add(session_id)\n\n            logger.debug(\n                f\"Assigned account to session {session_id}, \"\n                f\"account now has {len(self._account_sessions[best_account.organization_uuid])} sessions\"\n            )\n\n            return best_account\n\n        raise NoAccountsAvailableError()\n\n    async def get_account_for_oauth(\n        self,\n        is_pro: Optional[bool] = None,\n        is_max: Optional[bool] = None,\n    ) -> Account:\n        \"\"\"\n        Get an available account for OAuth authentication.\n\n        Args:\n            is_pro: Filter by pro capability. None means any.\n            is_max: Filter by max capability. 
None means any.\n\n        Returns:\n            Account instance if available\n        \"\"\"\n        earliest_account = None\n        earliest_last_used = None\n\n        for account in self._accounts.values():\n            if account.status != AccountStatus.VALID:\n                continue\n\n            if account.auth_type not in [AuthType.OAUTH_ONLY, AuthType.BOTH]:\n                continue\n\n            # Filter by capabilities if specified\n            if is_pro is not None and account.is_pro != is_pro:\n                continue\n            if is_max is not None and account.is_max != is_max:\n                continue\n\n            if earliest_last_used is None or account.last_used < earliest_last_used:\n                earliest_last_used = account.last_used\n                earliest_account = account\n\n        if earliest_account:\n            logger.debug(\n                f\"Selected OAuth account: {earliest_account.organization_uuid[:8]}... \"\n                f\"(last used: {earliest_account.last_used.isoformat()})\"\n            )\n            return earliest_account\n\n        raise NoAccountsAvailableError()\n\n    async def get_account_by_id(self, account_id: str) -> Optional[Account]:\n        \"\"\"\n        Get an account by its organization UUID.\n\n        Args:\n            account_id: The organization UUID of the account\n\n        Returns:\n            Account instance if found and valid, None otherwise\n        \"\"\"\n        account = self._accounts.get(account_id)\n        \n        if account and account.status == AccountStatus.VALID:\n            logger.debug(f\"Retrieved account by ID: {account_id[:8]}...\")\n            return account\n        \n        if account:\n            logger.debug(\n                f\"Account {account_id[:8]}... found but not valid: status={account.status}\"\n            )\n        else:\n            logger.debug(f\"Account {account_id[:8]}... 
not found\")\n        \n        return None\n\n    async def release_session(self, session_id: str) -> None:\n        \"\"\"Release a session's account assignment.\"\"\"\n        if session_id in self._session_accounts:\n            organization_uuid = self._session_accounts[session_id]\n            del self._session_accounts[session_id]\n\n            if organization_uuid in self._account_sessions:\n                self._account_sessions[organization_uuid].discard(session_id)\n\n            logger.debug(f\"Released account for session {session_id}\")\n\n    async def start_task(self) -> None:\n        \"\"\"Start the background task for AccountManager.\"\"\"\n        if self._account_task is None or self._account_task.done():\n            self._account_task = asyncio.create_task(self._task_loop())\n\n    async def stop_task(self) -> None:\n        \"\"\"Stop the background task for AccountManager.\"\"\"\n        if self._account_task and not self._account_task.done():\n            self._account_task.cancel()\n            try:\n                await self._account_task\n            except asyncio.CancelledError:\n                pass\n\n    async def _task_loop(self) -> None:\n        \"\"\"Background loop for AccountManager.\"\"\"\n        while True:\n            try:\n                await self._check_and_recover_accounts()\n                await self._check_and_refresh_accounts()\n            except asyncio.CancelledError:\n                break\n            except Exception as e:\n                logger.error(f\"Error in task loop: {e}\")\n            finally:\n                await asyncio.sleep(self._account_task_interval)\n\n    async def _check_and_recover_accounts(self) -> None:\n        \"\"\"Check and recover rate-limited accounts.\"\"\"\n        current_time = datetime.now(UTC)\n\n        for account in self._accounts.values():\n            # Check rate-limited accounts\n            if (\n                account.status == AccountStatus.RATE_LIMITED\n    
            and account.resets_at\n                and current_time >= account.resets_at\n            ):\n                account.status = AccountStatus.VALID\n                account.resets_at = None\n                logger.info(\n                    f\"Recovered rate-limited account: {account.organization_uuid[:8]}...\"\n                )\n\n    async def _check_and_refresh_accounts(self) -> None:\n        \"\"\"Check and refresh expired/expiring tokens.\"\"\"\n        current_timestamp = datetime.now(UTC).timestamp()\n\n        for account in self._accounts.values():\n            if (\n                account.auth_type in [AuthType.OAUTH_ONLY, AuthType.BOTH]\n                and account.oauth_token\n                and account.oauth_token.refresh_token\n                and account.oauth_token.expires_at\n            ):\n                if account.oauth_token.expires_at - current_timestamp < 300:\n                    asyncio.create_task(self._refresh_account_token(account))\n\n    async def _refresh_account_token(self, account: Account) -> None:\n        \"\"\"Refresh OAuth token for an account.\"\"\"\n        logger.info(\n            f\"Refreshing OAuth token for account: {account.organization_uuid[:8]}...\"\n        )\n\n        success = await oauth_authenticator.refresh_account_token(account)\n        if success:\n            logger.info(\n                f\"Successfully refreshed OAuth token for account: {account.organization_uuid[:8]}...\"\n            )\n        else:\n            logger.warning(\n                f\"Failed to refresh OAuth token for account: {account.organization_uuid[:8]}...\"\n            )\n            if account.auth_type == AuthType.BOTH:\n                account.auth_type = AuthType.COOKIE_ONLY\n                account.oauth_token = None\n            else:\n                account.status = AccountStatus.INVALID\n                logger.error(\n                    f\"Account {account.organization_uuid[:8]} is now invalid due to OAuth 
refresh failure\"\n                )\n            self.save_accounts()\n\n    async def _attempt_oauth_authentication(self, account: Account) -> None:\n        \"\"\"Attempt OAuth authentication for an account.\"\"\"\n\n        logger.info(\n            f\"Attempting OAuth authentication for account: {account.organization_uuid[:8]}...\"\n        )\n\n        success = await oauth_authenticator.authenticate_account(account)\n        if not success:\n            logger.warning(\n                f\"OAuth authentication failed for account: {account.organization_uuid[:8]}..., keeping as CookieOnly\"\n            )\n        else:\n            logger.info(\n                f\"OAuth authentication successful for account: {account.organization_uuid[:8]}...\"\n            )\n\n    async def get_status(self) -> Dict:\n        \"\"\"Get the current status of all accounts.\"\"\"\n        status = {\n            \"total_accounts\": len(self._accounts),\n            \"valid_accounts\": sum(\n                1 for a in self._accounts.values() if a.status == AccountStatus.VALID\n            ),\n            \"rate_limited_accounts\": sum(\n                1\n                for a in self._accounts.values()\n                if a.status == AccountStatus.RATE_LIMITED\n            ),\n            \"invalid_accounts\": sum(\n                1 for a in self._accounts.values() if a.status == AccountStatus.INVALID\n            ),\n            \"active_sessions\": len(self._session_accounts),\n            \"accounts\": [],\n        }\n\n        for organization_uuid, account in self._accounts.items():\n            account_info = {\n                \"organization_uuid\": organization_uuid[:8] + \"...\",\n                \"cookie\": account.cookie_value[:20] + \"...\"\n                if account.cookie_value\n                else \"None\",\n                \"status\": account.status.value,\n                \"auth_type\": account.auth_type.value,\n                \"sessions\": 
len(self._account_sessions[organization_uuid]),\n                \"last_used\": account.last_used.isoformat(),\n                \"resets_at\": account.resets_at.isoformat()\n                if account.resets_at\n                else None,\n                \"has_oauth\": account.oauth_token is not None,\n            }\n            status[\"accounts\"].append(account_info)\n\n        return status\n\n    def save_accounts(self) -> None:\n        \"\"\"Save all accounts to JSON file.\n\n        Args:\n            data_folder: Optional data folder path. If not provided, uses settings.data_folder\n        \"\"\"\n        if settings.no_filesystem_mode:\n            logger.debug(\"No-filesystem mode enabled, skipping account save to disk\")\n            return\n\n        settings.data_folder.mkdir(parents=True, exist_ok=True)\n\n        accounts_file = settings.data_folder / \"accounts.json\"\n\n        accounts_data = {\n            organization_uuid: account.to_dict()\n            for organization_uuid, account in self._accounts.items()\n        }\n\n        with open(accounts_file, \"w\", encoding=\"utf-8\") as f:\n            json.dump(accounts_data, f, indent=2)\n\n        logger.info(f\"Saved {len(accounts_data)} accounts to {accounts_file}\")\n\n    def load_accounts(self) -> None:\n        \"\"\"Load accounts from JSON file.\n\n        Args:\n            data_folder: Optional data folder path. 
If not provided, uses settings.data_folder\n        \"\"\"\n        if settings.no_filesystem_mode:\n            logger.debug(\"No-filesystem mode enabled, skipping account load from disk\")\n            return\n\n        accounts_file = settings.data_folder / \"accounts.json\"\n\n        if not accounts_file.exists():\n            logger.info(f\"No accounts file found at {accounts_file}\")\n            return\n\n        try:\n            with open(accounts_file, \"r\", encoding=\"utf-8\") as f:\n                accounts_data = json.load(f)\n\n            for organization_uuid, account_data in accounts_data.items():\n                account = Account.from_dict(account_data)\n                self._accounts[organization_uuid] = account\n\n                # Rebuild cookie mapping\n                if account.cookie_value:\n                    self._cookie_to_uuid[account.cookie_value] = organization_uuid\n\n            logger.info(f\"Loaded {len(accounts_data)} accounts from {accounts_file}\")\n\n        except Exception as e:\n            logger.error(f\"Failed to load accounts from {accounts_file}: {e}\")\n\n    def __repr__(self) -> str:\n        \"\"\"String representation of the AccountManager.\"\"\"\n        return f\"<AccountManager accounts={len(self._accounts)} sessions={len(self._session_accounts)}>\"\n\n\naccount_manager = AccountManager()\n"
  },
  {
    "path": "app/services/cache.py",
    "content": "import asyncio\nimport hashlib\nimport json\nimport threading\nfrom datetime import datetime, timedelta\nfrom typing import Dict, List, Optional, Tuple\n\nfrom loguru import logger\n\nfrom app.core.config import settings\nfrom app.models.claude import (\n    Base64ImageSource,\n    FileImageSource,\n    ImageContent,\n    InputMessage,\n    ContentBlock,\n    ServerToolUseContent,\n    TextContent,\n    ThinkingContent,\n    ToolResultContent,\n    ToolUseContent,\n    URLImageSource,\n    WebSearchToolResultContent,\n)\n\n\nclass CacheCheckpoint:\n    \"\"\"Cache checkpoint with timestamp.\"\"\"\n\n    def __init__(self, checkpoint: str, account_id: str):\n        self.checkpoint = checkpoint\n        self.account_id = account_id\n        self.created_at = datetime.now()\n\n\nclass CacheService:\n    \"\"\"\n    Service for managing prompt cache mapping to accounts.\n    Ensures requests with cached prompts are sent to the same account.\n    \"\"\"\n\n    _instance: Optional[\"CacheService\"] = None\n    _lock = threading.Lock()\n\n    def __new__(cls):\n        \"\"\"Implement singleton pattern.\"\"\"\n        if cls._instance is None:\n            with cls._lock:\n                if cls._instance is None:\n                    cls._instance = super().__new__(cls)\n        return cls._instance\n\n    def __init__(self):\n        \"\"\"Initialize the CacheService.\"\"\"\n        # Maps checkpoint hash -> CacheCheckpoint\n        self._checkpoints: Dict[str, CacheCheckpoint] = {}\n        self._cleanup_task: Optional[asyncio.Task] = None\n\n        logger.info(\n            f\"CacheService initialized with timeout={settings.cache_timeout}s, \"\n            f\"cleanup_interval={settings.cache_cleanup_interval}s\"\n        )\n\n    def process_messages(\n        self,\n        model: str,\n        messages: List[InputMessage],\n        system: Optional[List[TextContent]] = None,\n    ) -> Tuple[Optional[str], List[str]]:\n        \"\"\"\n        
Process messages to find cached account and extract new checkpoints.\n\n        Args:\n            messages: List of input messages\n            system: Optional system messages\n\n        Returns:\n            Tuple of (account_id, checkpoints) where:\n            - account_id: The account ID if a cached prompt was found, None otherwise\n            - checkpoints: List of feature values for content blocks with cache_control\n        \"\"\"\n        account_id: Optional[str] = None\n        checkpoints: List[str] = []\n\n        hasher = hashlib.sha256()\n\n        self._update_hasher(hasher, {\"model\": model})\n\n        if system:\n            for text_content in system:\n                content_block_data = self._content_block_to_dict(text_content)\n                self._update_hasher(hasher, content_block_data)\n\n                feature_value = hasher.hexdigest()\n\n                if text_content.cache_control:\n                    checkpoints.append(feature_value)\n\n                if feature_value in self._checkpoints:\n                    account_id = self._checkpoints[feature_value].account_id\n\n        for message in messages:\n            self._update_hasher(hasher, {\"role\": message.role.value})\n\n            if isinstance(message.content, str):\n                self._update_hasher(hasher, {\"type\": \"text\", \"text\": message.content})\n            elif isinstance(message.content, list):\n                for content_block in message.content:\n                    content_block_data = self._content_block_to_dict(content_block)\n                    self._update_hasher(hasher, content_block_data)\n\n                    feature_value = hasher.hexdigest()\n\n                    if (\n                        hasattr(content_block, \"cache_control\")\n                        and content_block.cache_control\n                    ):\n                        checkpoints.append(feature_value)\n\n                    if feature_value in self._checkpoints:\n    
                    account_id = self._checkpoints[feature_value].account_id\n\n        if account_id:\n            logger.debug(\n                f\"Cache hit: account_id={account_id}, feature={feature_value[:16]}...\"\n            )\n\n        return account_id, checkpoints\n\n    def add_checkpoints(self, checkpoints: List[str], account_id: str) -> None:\n        \"\"\"\n        Add checkpoint mappings to the cache.\n\n        Args:\n            checkpoints: List of feature values to map\n            account_id: Account ID to map to\n        \"\"\"\n        for checkpoint in checkpoints:\n            self._checkpoints[checkpoint] = CacheCheckpoint(checkpoint, account_id)\n            logger.debug(\n                f\"Added checkpoint mapping: {checkpoint[:16]}... -> {account_id}\"\n            )\n\n        logger.debug(\n            f\"Cache updated: {len(checkpoints)} checkpoints added. \"\n            f\"Total cache size: {len(self._checkpoints)}\"\n        )\n\n    def _update_hasher(self, hasher: \"hashlib._Hash\", data: Dict) -> None:\n        \"\"\"\n        Update the hasher with new data in a consistent way.\n\n        Args:\n            hasher: The hash object to update\n            data: Dictionary data to add to the hash\n        \"\"\"\n        # Serialize data in a consistent way\n        json_str = json.dumps(data, sort_keys=True, separators=(\",\", \":\"))\n\n        # Add a delimiter to ensure proper separation between blocks\n        hasher.update(b\"\\x00\")  # NULL byte as delimiter\n        hasher.update(json_str.encode(\"utf-8\"))\n\n    def _content_block_to_dict(self, content_block: ContentBlock) -> Dict:\n        \"\"\"\n        Convert a ContentBlock to a dictionary for hashing.\n        Only includes relevant fields for cache matching.\n        \"\"\"\n        result = {\"type\": content_block.type}\n\n        if isinstance(content_block, TextContent):\n            result[\"text\"] = content_block.text\n        elif 
isinstance(content_block, ThinkingContent):\n            result[\"thinking\"] = content_block.thinking\n        elif isinstance(content_block, ToolUseContent) or isinstance(\n            content_block, ServerToolUseContent\n        ):\n            result[\"id\"] = content_block.id\n        elif isinstance(content_block, ToolResultContent) or isinstance(\n            content_block, WebSearchToolResultContent\n        ):\n            result[\"tool_use_id\"] = content_block.tool_use_id\n        elif isinstance(content_block, ImageContent):\n            result[\"source_type\"] = content_block.source.type\n            if isinstance(content_block.source, Base64ImageSource):\n                result[\"source_data\"] = content_block.source.data\n            elif isinstance(content_block.source, URLImageSource):\n                result[\"source_url\"] = content_block.source.url\n            elif isinstance(content_block.source, FileImageSource):\n                result[\"source_file\"] = content_block.source.file_uuid\n\n        return result\n\n    async def start_cleanup_task(self) -> None:\n        \"\"\"Start the background task for cleaning up expired cache checkpoints.\"\"\"\n        if self._cleanup_task is None or self._cleanup_task.done():\n            self._cleanup_task = asyncio.create_task(self._cleanup_loop())\n            logger.info(\"Started cache cleanup task\")\n\n    async def stop_cleanup_task(self) -> None:\n        \"\"\"Stop the background cleanup task.\"\"\"\n        if self._cleanup_task and not self._cleanup_task.done():\n            self._cleanup_task.cancel()\n            try:\n                await self._cleanup_task\n            except asyncio.CancelledError:\n                pass\n            logger.info(\"Stopped cache cleanup task\")\n\n    async def _cleanup_loop(self) -> None:\n        \"\"\"Background loop to clean up expired cache checkpoints.\"\"\"\n        while True:\n            try:\n                
self._cleanup_expired_checkpoints()\n                await asyncio.sleep(settings.cache_cleanup_interval)\n            except asyncio.CancelledError:\n                break\n            except Exception as e:\n                logger.error(f\"Error in cache cleanup loop: {e}\")\n                await asyncio.sleep(settings.cache_cleanup_interval)\n\n    def _cleanup_expired_checkpoints(self) -> None:\n        \"\"\"Clean up all expired cache checkpoints.\"\"\"\n        current_time = datetime.now()\n        timeout_duration = timedelta(seconds=settings.cache_timeout)\n        expired_checkpoints = []\n\n        for checkpoint_hash, cache_checkpoint in self._checkpoints.items():\n            if (current_time - cache_checkpoint.created_at) > timeout_duration:\n                expired_checkpoints.append(checkpoint_hash)\n\n        for checkpoint_hash in expired_checkpoints:\n            del self._checkpoints[checkpoint_hash]\n\n        if expired_checkpoints:\n            logger.info(\n                f\"Cleaned up {len(expired_checkpoints)} expired cache checkpoints\"\n            )\n\n    async def cleanup_all(self) -> None:\n        \"\"\"Clean up all cache checkpoints and stop the cleanup task.\"\"\"\n        await self.stop_cleanup_task()\n        self._checkpoints.clear()\n        logger.info(\"Cleaned up all cache checkpoints\")\n\n    def __repr__(self) -> str:\n        \"\"\"String representation of the CacheService.\"\"\"\n        return f\"<CacheService checkpoints={len(self._checkpoints)}>\"\n\n\ncache_service = CacheService()\n"
  },
  {
    "path": "app/services/event_processing/__init__.py",
    "content": "from .event_parser import EventParser\nfrom .event_serializer import EventSerializer\n\n__all__ = [\n    \"EventParser\",\n    \"EventSerializer\",\n]\n"
  },
  {
    "path": "app/services/event_processing/event_parser.py",
    "content": "import json\nfrom typing import AsyncIterator, Optional\nfrom dataclasses import dataclass\nfrom loguru import logger\n\nfrom pydantic import ValidationError\n\nfrom app.models.streaming import (\n    StreamingEvent,\n    UnknownEvent,\n)\n\n\n@dataclass\nclass SSEMessage:\n    event: Optional[str] = None\n    data: Optional[str] = None\n\n\nclass EventParser:\n    \"\"\"Parses SSE (Server-Sent Events) streams into StreamingEvent objects.\"\"\"\n\n    def __init__(self, skip_unknown_events: bool = True):\n        self.skip_unknown_events = skip_unknown_events\n        self.buffer = \"\"\n\n    async def parse_stream(\n        self, stream: AsyncIterator[str]\n    ) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Parse an SSE stream and yield StreamingEvent objects.\n\n        Args:\n            stream: AsyncIterator that yields string chunks from the SSE stream\n\n        Yields:\n            StreamingEvent objects parsed from the stream\n        \"\"\"\n        async for chunk in stream:\n            chunk = chunk.replace('\\r\\n', '\\n') # Normalize line endings\n            self.buffer += chunk\n\n            async for event in self._process_buffer():\n                logger.debug(f\"Parsed event:\\n{event.model_dump()}\")\n                yield event\n\n        async for event in self.flush():\n            yield event\n\n    async def _process_buffer(self) -> AsyncIterator[StreamingEvent]:\n        \"\"\"Process the buffer and yield complete SSE messages as StreamingEvent objects.\"\"\"\n        while \"\\n\\n\" in self.buffer:\n            message_end = self.buffer.index(\"\\n\\n\")\n            message_text = self.buffer[:message_end]\n            self.buffer = self.buffer[message_end + 2 :]\n\n            sse_msg = self._parse_sse_message(message_text)\n\n            if sse_msg.data:\n                event = self._create_streaming_event(sse_msg)\n                if event:\n                    yield event\n\n    def 
_parse_sse_message(self, message_text: str) -> SSEMessage:\n        \"\"\"Parse a single SSE message from text.\"\"\"\n        sse_msg = SSEMessage()\n\n        for line in message_text.split(\"\\n\"):\n            if not line:\n                continue\n\n            if \":\" not in line:\n                field = line\n                value = \"\"\n            else:\n                field, value = line.split(\":\", 1)\n                if value.startswith(\" \"):\n                    value = value[1:]\n\n            if field == \"event\":\n                sse_msg.event = value\n            elif field == \"data\":\n                if sse_msg.data is None:\n                    sse_msg.data = value\n                else:\n                    sse_msg.data += \"\\n\" + value\n\n        return sse_msg\n\n    def _create_streaming_event(self, sse_msg: SSEMessage) -> Optional[StreamingEvent]:\n        \"\"\"\n        Create a StreamingEvent from an SSE message.\n\n        Args:\n            sse_msg: The parsed SSE message\n\n        Returns:\n            StreamingEvent object or None if parsing fails\n        \"\"\"\n        try:\n            data = json.loads(sse_msg.data)\n\n        except json.JSONDecodeError as e:\n            logger.error(f\"Failed to parse JSON data: {e}\")\n            logger.debug(f\"Raw data: {sse_msg.data}\")\n            return None\n\n        try:\n            streaming_event = StreamingEvent(root=data)\n        except ValidationError:\n            if self.skip_unknown_events:\n                logger.debug(f\"Skipping unknown event: {sse_msg.event}\")\n                return None\n            logger.warning(\n                \"Failed to validate streaming event. 
Falling back to UnknownEvent.\"\n            )\n            logger.debug(f\"Event data: {data}\")\n            streaming_event = StreamingEvent(\n                root=UnknownEvent(type=sse_msg.event, data=data)\n            )\n\n        return streaming_event\n\n    async def flush(self) -> AsyncIterator[StreamingEvent]:\n        \"\"\"\n        Flush any remaining data in the buffer.\n\n        This should be called when the stream ends to process any incomplete messages.\n\n        Yields:\n            Any remaining StreamingEvent objects\n        \"\"\"\n        if self.buffer.strip():\n            logger.warning(f\"Flushing incomplete buffer: {self.buffer[:100]}...\")\n\n            self.buffer += \"\\n\\n\"\n\n            async for event in self._process_buffer():\n                yield event\n"
  },
  {
    "path": "app/services/event_processing/event_serializer.py",
    "content": "import json\nfrom typing import AsyncIterator, Optional\n\nfrom app.models.streaming import StreamingEvent, UnknownEvent\n\n\nclass EventSerializer:\n    \"\"\"Serializes StreamingEvent objects into SSE (Server-Sent Events) format.\"\"\"\n\n    def __init__(self, skip_unknown_events: bool = True):\n        self.skip_unknown_events = skip_unknown_events\n\n    async def serialize_stream(\n        self, events: AsyncIterator[StreamingEvent]\n    ) -> AsyncIterator[str]:\n        \"\"\"\n        Serialize a stream of StreamingEvent objects into SSE format.\n\n        Args:\n            events: AsyncIterator that yields StreamingEvent objects\n\n        Yields:\n            String chunks in SSE format\n        \"\"\"\n        async for event in events:\n            sse_message = self.serialize_event(event)\n            if sse_message:\n                yield sse_message\n\n    def serialize_event(self, event: StreamingEvent) -> Optional[str]:\n        \"\"\"\n        Serialize a single StreamingEvent into SSE format.\n\n        Args:\n            event: StreamingEvent object to serialize\n\n        Returns:\n            SSE formatted string or None if serialization fails\n        \"\"\"\n        if isinstance(event.root, UnknownEvent):\n            if self.skip_unknown_events:\n                return None\n            json_data = json.dumps(\n                event.root.data, ensure_ascii=False, separators=(\",\", \":\")\n            )\n        else:\n            json_data = event.model_dump_json(exclude_none=True)\n\n        sse_parts = []\n\n        if event.root.type:\n            sse_parts.append(f\"event: {event.root.type}\")\n\n        data_lines = json_data.split(\"\\n\")\n        for line in data_lines:\n            sse_parts.append(f\"data: {line}\")\n\n        sse_message = \"\\n\".join(sse_parts) + \"\\n\\n\"\n\n        return sse_message\n\n    async def serialize_batch(self, events: list[StreamingEvent]) -> str:\n        \"\"\"\n        
Serialize a batch of StreamingEvent objects into a single SSE string.\n\n        Args:\n            events: List of StreamingEvent objects to serialize\n\n        Returns:\n            Concatenated SSE formatted string\n        \"\"\"\n        result_parts = []\n\n        for event in events:\n            sse_message = self.serialize_event(event)\n            if sse_message:\n                result_parts.append(sse_message)\n\n        return \"\".join(result_parts)\n"
  },
  {
    "path": "app/services/i18n.py",
    "content": "import json\nimport re\nfrom typing import Dict, Any, Optional\nfrom loguru import logger\n\nfrom app.core.config import settings\n\n\nclass I18nService:\n    \"\"\"\n    Internationalization service for loading and managing translations.\n    Supports message interpolation with context variables.\n    \"\"\"\n\n    def __init__(self):\n        self._translations: Dict[str, Dict[str, Any]] = {}\n        self._default_language = settings.default_language\n        self._locales_dir = settings.locales_folder\n        self._load_translations()\n\n    def _load_translations(self) -> None:\n        \"\"\"Load all translation files from the locales directory.\"\"\"\n        if not self._locales_dir.exists():\n            logger.warning(f\"Locales directory not found: {self._locales_dir}\")\n            return\n\n        for file_path in self._locales_dir.glob(\"*.json\"):\n            language_code = file_path.stem\n            try:\n                with open(file_path, \"r\", encoding=\"utf-8\") as f:\n                    self._translations[language_code] = json.load(f)\n                logger.info(f\"Loaded translations for language: {language_code}\")\n            except Exception as e:\n                logger.error(f\"Failed to load translations for {language_code}: {e}\")\n\n    def _get_nested_value(self, data: Dict[str, Any], key: str) -> Optional[str]:\n        \"\"\"\n        Get a nested value from a dictionary using dot notation.\n        Example: 'global.internalServerError' -> data['global']['internalServerError']\n        \"\"\"\n        keys = key.split(\".\")\n        current = data\n\n        for k in keys:\n            if isinstance(current, dict) and k in current:\n                current = current[k]\n            else:\n                return None\n\n        return current if isinstance(current, str) else None\n\n    def _interpolate_message(self, message: str, context: Dict[str, Any]) -> str:\n        \"\"\"\n        Interpolate 
context variables into the message.\n        Supports {variable_name} syntax.\n        \"\"\"\n        if not context:\n            return message\n\n        # Use regex to find all {variable_name} patterns and replace them\n        def replace_var(match):\n            var_name = match.group(1)\n            return str(context.get(var_name, match.group(0)))\n\n        return re.sub(r\"\\{([^}]+)\\}\", replace_var, message)\n\n    def get_message(\n        self,\n        message_key: str,\n        language: str = None,\n        context: Optional[Dict[str, Any]] = None,\n    ) -> str:\n        \"\"\"\n        Get a translated message by key and language.\n\n        Args:\n            message_key: The message key in dot notation (e.g., 'global.internalServerError')\n            language: The language code (defaults to default language)\n            context: Context variables for message interpolation\n\n        Returns:\n            The translated and interpolated message\n        \"\"\"\n        if language is None:\n            language = self._default_language\n\n        if language in self._translations:\n            message = self._get_nested_value(self._translations[language], message_key)\n            if message:\n                return self._interpolate_message(message, context or {})\n\n        if (\n            language != self._default_language\n            and self._default_language in self._translations\n        ):\n            message = self._get_nested_value(\n                self._translations[self._default_language], message_key\n            )\n            if message:\n                return self._interpolate_message(message, context or {})\n\n        logger.warning(\n            f\"Translation not found for key '{message_key}' in language '{language}'\"\n        )\n        return message_key\n\n    def parse_accept_language(self, accept_language: Optional[str]) -> str:\n        \"\"\"\n        Parse Accept-Language header and return the best matching 
language.\n\n        Args:\n            accept_language: The Accept-Language header value\n\n        Returns:\n            The best matching language code\n        \"\"\"\n        if not accept_language:\n            return self._default_language\n        languages = []\n        for lang_part in accept_language.split(\",\"):\n            lang_part = lang_part.strip()\n            if \";\" in lang_part:\n                lang, quality = lang_part.split(\";\", 1)\n                try:\n                    q = float(quality.split(\"=\")[1])\n                except (IndexError, ValueError):\n                    q = 1.0\n            else:\n                lang = lang_part\n                q = 1.0\n\n            primary_lang = lang.split(\"-\")[0].lower()\n            languages.append((primary_lang, q))\n\n        languages.sort(key=lambda x: x[1], reverse=True)\n\n        for lang, _ in languages:\n            if lang in self._translations:\n                return lang\n\n        return self._default_language\n\n    def get_supported_languages(self) -> list[str]:\n        \"\"\"Get list of supported language codes.\"\"\"\n        return list(self._translations.keys())\n\n    def reload_translations(self) -> None:\n        \"\"\"Reload all translation files.\"\"\"\n        self._translations.clear()\n        self._load_translations()\n\n\ni18n_service = I18nService()\n"
  },
  {
    "path": "app/services/oauth.py",
    "content": "import base64\nimport hashlib\nimport secrets\nimport time\nfrom typing import Dict, List, Optional, Tuple\nfrom urllib.parse import urlparse, parse_qs\n\nfrom app.core.http_client import Response, create_session, create_plain_session\nfrom loguru import logger\n\nfrom app.core.config import settings\nfrom app.core.account import Account, AuthType, OAuthToken\nfrom app.core.exceptions import (\n    AppError,\n    ClaudeAuthenticationError,\n    ClaudeHttpError,\n    CloudflareBlockedError,\n    CookieAuthorizationError,\n    OAuthExchangeError,\n    OrganizationInfoError,\n)\n\n\nclass OAuthAuthenticator:\n    \"\"\"OAuth authenticator for Claude accounts using cookies.\"\"\"\n\n    def _generate_pkce(self) -> Tuple[str, str]:\n        \"\"\"Generate PKCE verifier and challenge.\"\"\"\n        verifier = (\n            base64.urlsafe_b64encode(secrets.token_bytes(32))\n            .decode(\"utf-8\")\n            .rstrip(\"=\")\n        )\n        challenge = (\n            base64.urlsafe_b64encode(hashlib.sha256(verifier.encode(\"utf-8\")).digest())\n            .decode(\"utf-8\")\n            .rstrip(\"=\")\n        )\n        return verifier, challenge\n\n    def _build_headers(self, cookie: str) -> Dict[str, str]:\n        \"\"\"Build request headers.\"\"\"\n        claude_endpoint = settings.claude_ai_url.encoded_string().rstrip(\"/\")\n\n        return {\n            \"Accept\": \"application/json\",\n            \"Accept-Language\": \"en-US,en;q=0.9\",\n            \"Cache-Control\": \"no-cache\",\n            \"Cookie\": cookie,\n            \"Origin\": claude_endpoint,\n            \"Referer\": f\"{claude_endpoint}/new\",\n            \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36\",\n        }\n\n    async def _request(self, method: str, url: str, **kwargs) -> Response:\n        \"\"\"Browser-impersonating request — for claude.ai endpoints (Cloudflare).\"\"\"\n        session = create_session(\n            
timeout=settings.request_timeout,\n            impersonate=\"chrome\",\n            proxy=settings.proxy_url,\n            follow_redirects=False,\n        )\n        async with session:\n            response: Response = await session.request(method=method, url=url, **kwargs)\n\n        if response.status_code == 302:\n            raise CloudflareBlockedError()\n\n        if response.status_code == 403:\n            raise ClaudeAuthenticationError()\n\n        if response.status_code >= 300:\n            raise ClaudeHttpError(\n                url=url,\n                status_code=response.status_code,\n                error_type=\"Unknown\",\n                error_message=\"Error occurred during request to Claude.ai\",\n            )\n\n        return response\n\n    async def _token_request(self, url: str, data: dict) -> Response:\n        \"\"\"Plain (non-impersonating) POST to the OAuth token endpoint.\n\n        console.anthropic.com/v1/oauth/token rejects requests that carry\n        browser fingerprinting headers (User-Agent, Origin, TLS JA3).\n        Using httpx here avoids the 429.\n        \"\"\"\n        session = create_plain_session(\n            timeout=settings.request_timeout,\n            proxy=settings.proxy_url,\n            follow_redirects=False,\n        )\n        async with session:\n            response: Response = await session.request(\n                method=\"POST\",\n                url=url,\n                data=data,\n                headers={\n                    \"Content-Type\": \"application/x-www-form-urlencoded\",\n                    \"User-Agent\": \"claude-cli/2.1.81 (external, cli)\",\n                },\n            )\n        if response.status_code != 200:\n            try:\n                error_body = await response.json()\n            except Exception:\n                error_body = \"<non-JSON body>\"\n            logger.error(\n                f\"Token endpoint returned {response.status_code}: {error_body}\"\n       
     )\n        return response\n\n    async def get_organization_info(self, cookie: str) -> Tuple[str, List[str]]:\n        \"\"\"Get organization UUID and capabilities.\"\"\"\n        url = f\"{settings.claude_ai_url.encoded_string().rstrip('/')}/api/organizations\"\n        headers = self._build_headers(cookie)\n\n        try:\n            response = await self._request(\"GET\", url, headers=headers)\n\n            org_data = await response.json()\n            if org_data and isinstance(org_data, list):\n                organization_uuid = None\n                max_capabilities = []\n\n                for org in org_data:\n                    if \"uuid\" in org and \"capabilities\" in org:\n                        capabilities = org.get(\"capabilities\", [])\n\n                        if \"chat\" not in capabilities:\n                            continue\n\n                        if len(capabilities) > len(max_capabilities):\n                            organization_uuid = org.get(\"uuid\")\n                            max_capabilities = capabilities\n\n                if organization_uuid:\n                    logger.info(\n                        f\"Found organization UUID: {organization_uuid}, capabilities: {max_capabilities}\"\n                    )\n                    return organization_uuid, max_capabilities\n\n                raise OrganizationInfoError(\n                    reason=\"No valid organization found with chat capabilities\"\n                )\n\n            else:\n                logger.error(\"No organization data found in response\")\n                raise OrganizationInfoError(reason=\"No organization data found\")\n\n        except AppError as e:\n            raise e\n\n        except Exception as e:\n            logger.error(f\"Error getting organization UUID: {e}\")\n            raise OrganizationInfoError(reason=str(e))\n\n    async def authorize_with_cookie(\n        self, cookie: str, organization_uuid: str\n    ) -> Tuple[str, 
str]:\n        \"\"\"\n        Use Cookie to automatically get authorization code.\n        Returns: (authorization code, verifier)\n        \"\"\"\n        verifier, challenge = self._generate_pkce()\n        state = (\n            base64.urlsafe_b64encode(secrets.token_bytes(32))\n            .decode(\"utf-8\")\n            .rstrip(\"=\")\n        )\n\n        authorize_url = settings.oauth_authorize_url.format(\n            organization_uuid=organization_uuid\n        )\n\n        payload = {\n            \"response_type\": \"code\",\n            \"client_id\": settings.oauth_client_id,\n            \"organization_uuid\": organization_uuid,\n            \"redirect_uri\": settings.oauth_redirect_uri,\n            \"scope\": \"user:profile user:inference\",\n            \"state\": state,\n            \"code_challenge\": challenge,\n            \"code_challenge_method\": \"S256\",\n        }\n\n        headers = self._build_headers(cookie)\n        headers[\"Content-Type\"] = \"application/json\"\n\n        logger.debug(f\"Requesting authorization from: {authorize_url}\")\n\n        response = await self._request(\n            \"POST\", authorize_url, json=payload, headers=headers\n        )\n\n        auth_response = await response.json()\n        redirect_uri = auth_response.get(\"redirect_uri\")\n\n        if not redirect_uri:\n            logger.error(\"No redirect_uri in authorization response\")\n            raise CookieAuthorizationError(reason=\"No redirect URI found in response\")\n\n        logger.info(f\"Got redirect URI: {redirect_uri}\")\n\n        parsed_url = urlparse(redirect_uri)\n        query_params = parse_qs(parsed_url.query)\n\n        if \"code\" not in query_params:\n            logger.error(\"No authorization code in redirect_uri\")\n            raise CookieAuthorizationError(\n                reason=\"No authorization code found in response\"\n            )\n\n        auth_code = query_params[\"code\"][0]\n        response_state = 
query_params.get(\"state\", [None])[0]\n\n        logger.info(f\"Extracted authorization code: {auth_code[:20]}...\")\n\n        if response_state:\n            full_code = f\"{auth_code}#{response_state}\"\n        else:\n            full_code = auth_code\n\n        return full_code, verifier\n\n    async def exchange_token(self, code: str, verifier: str) -> Dict:\n        \"\"\"Exchange authorization code for access token.\"\"\"\n        parts = code.split(\"#\")\n        auth_code = parts[0]\n        state = parts[1] if len(parts) > 1 else None\n\n        data = {\n            \"code\": auth_code,\n            \"grant_type\": \"authorization_code\",\n            \"client_id\": settings.oauth_client_id,\n            \"redirect_uri\": settings.oauth_redirect_uri,\n            \"code_verifier\": verifier,\n        }\n\n        if state:\n            data[\"state\"] = state\n\n        try:\n            response = await self._token_request(settings.oauth_token_url, data)\n\n            token_data = await response.json()\n\n            if (\n                \"access_token\" not in token_data\n                or \"refresh_token\" not in token_data\n                or \"expires_in\" not in token_data\n            ):\n                logger.error(\"Invalid token response received\")\n                raise OAuthExchangeError(reason=\"Invalid token response\")\n\n            return token_data\n\n        except AppError as e:\n            raise e\n\n        except Exception as e:\n            logger.error(f\"Error exchanging token: {e}\")\n            raise OAuthExchangeError(reason=str(e))\n\n    async def refresh_access_token(self, refresh_token: str) -> Optional[Dict]:\n        \"\"\"Refresh access token.\"\"\"\n        data = {\n            \"grant_type\": \"refresh_token\",\n            \"refresh_token\": refresh_token,\n            \"client_id\": settings.oauth_client_id,\n        }\n\n        try:\n            response = await 
self._token_request(settings.oauth_token_url, data)\n\n            if response.status_code != 200:\n                logger.error(f\"Token refresh failed: {response.status_code}\")\n                return None\n\n            token_data = await response.json()\n            return token_data\n\n        except Exception as e:\n            logger.error(f\"Error refreshing token: {e}\")\n            return None\n\n    async def authenticate_account(self, account: Account) -> bool:\n        \"\"\"\n        Authenticate an account using OAuth.\n        Returns True if successful, False otherwise.\n        \"\"\"\n        if not account.cookie_value:\n            logger.error(\"Account has no cookie value\")\n            return False\n\n        try:\n            # Get organization UUID\n            org_uuid, _ = await self.get_organization_info(account.cookie_value)\n\n            # Get authorization code\n            auth_result = await self.authorize_with_cookie(\n                account.cookie_value, org_uuid\n            )\n\n            auth_code, verifier = auth_result\n\n            # Exchange for tokens\n            token_data = await self.exchange_token(auth_code, verifier)\n\n            # Update account with OAuth tokens\n            account.oauth_token = OAuthToken(\n                access_token=token_data[\"access_token\"],\n                refresh_token=token_data[\"refresh_token\"],\n                expires_at=time.time() + token_data[\"expires_in\"],\n            )\n            account.auth_type = AuthType.BOTH\n            account.save()\n\n            logger.info(\n                f\"Successfully authenticated account with OAuth: {account.organization_uuid[:8]}...\"\n            )\n            return True\n\n        except Exception as e:\n            logger.error(f\"OAuth authentication failed: {e}\")\n            return False\n\n    async def refresh_account_token(self, account: Account) -> bool:\n        \"\"\"\n        Refresh OAuth token for an 
account.\n        Returns True if successful, False otherwise.\n        \"\"\"\n        if not account.oauth_token or not account.oauth_token.refresh_token:\n            logger.error(\"Account has no refresh token\")\n            return False\n\n        token_data = await self.refresh_access_token(account.oauth_token.refresh_token)\n        if not token_data:\n            return False\n\n        account.oauth_token = OAuthToken(\n            access_token=token_data[\"access_token\"],\n            refresh_token=token_data[\"refresh_token\"],\n            expires_at=time.time() + token_data[\"expires_in\"],\n        )\n        account.save()\n\n        logger.info(\n            f\"Successfully refreshed OAuth token for account: {account.organization_uuid[:8]}...\"\n        )\n        return True\n\n\noauth_authenticator = OAuthAuthenticator()\n"
  },
  {
    "path": "app/services/session.py",
    "content": "import asyncio\nfrom typing import Dict, Optional\nfrom datetime import datetime, timedelta\nimport threading\nfrom loguru import logger\n\nfrom app.core.config import settings\nfrom app.core.claude_session import ClaudeWebSession\n\n\nclass SessionManager:\n    \"\"\"\n    Singleton manager for Claude sessions with automatic cleanup.\n    \"\"\"\n\n    _instance: Optional[\"SessionManager\"] = None\n    _lock = threading.Lock()\n\n    def __new__(cls):\n        \"\"\"Implement singleton pattern.\"\"\"\n        if cls._instance is None:\n            with cls._lock:\n                if cls._instance is None:\n                    cls._instance = super().__new__(cls)\n        return cls._instance\n\n    def __init__(self):\n        \"\"\"Initialize the SessionManager.\"\"\"\n        # Singleton guard: __new__ always returns the same instance, so\n        # prevent __init__ from re-running and wiping live sessions and\n        # the running cleanup task on repeated SessionManager() calls.\n        if getattr(self, \"_initialized\", False):\n            return\n        self._initialized = True\n        self._sessions: Dict[str, ClaudeWebSession] = {}\n        self._session_lock = asyncio.Lock()\n        self._cleanup_task: Optional[asyncio.Task] = None\n        self._session_timeout = settings.session_timeout\n        self._cleanup_interval = settings.session_cleanup_interval\n\n        logger.info(\n            f\"SessionManager initialized with timeout={self._session_timeout}s, \"\n            f\"cleanup_interval={self._cleanup_interval}s\"\n        )\n\n    async def get_or_create_session(self, session_id: str) -> ClaudeWebSession:\n        \"\"\"\n        Get or create a new Claude session.\n\n        Args:\n            session_id: Unique identifier for the session\n\n        Returns:\n            Created ClaudeSession instance\n        \"\"\"\n        async with self._session_lock:\n            if session_id in self._sessions:\n                return self._sessions[session_id]\n\n            session = ClaudeWebSession(session_id)\n            await session.initialize()\n            self._sessions[session_id] = session\n\n            logger.debug(f\"Created new session: {session_id}\")\n            return session\n\n    async def get_session(self, session_id: str) -> 
Optional[ClaudeWebSession]:\n        \"\"\"\n        Get a session by ID.\n\n        Args:\n            session_id: Unique identifier for the session\n\n        Returns:\n            ClaudeSession instance if found, None otherwise\n        \"\"\"\n        async with self._session_lock:\n            session = self._sessions.get(session_id)\n\n            if session:\n                # Check if session is expired\n                if await self._is_session_expired(session):\n                    logger.debug(f\"Session {session_id} is expired, removing\")\n                    await self._remove_session(session_id)\n                    return None\n\n            return session\n\n    async def remove_session(self, session_id: str) -> None:\n        \"\"\"\n        Remove a session by ID.\n\n        Args:\n            session_id: Unique identifier for the session\n        \"\"\"\n        async with self._session_lock:\n            if session_id in self._sessions:\n                await self._remove_session(session_id)\n\n    async def _is_session_expired(self, session: ClaudeWebSession) -> bool:\n        \"\"\"\n        Check if a session is expired.\n\n        A session is considered expired if its last_activity is older than session_timeout.\n\n        Args:\n            session: Session to check\n\n        Returns:\n            True if session is expired, False otherwise\n        \"\"\"\n        current_time = datetime.now()\n        timeout_duration = timedelta(seconds=self._session_timeout)\n\n        return (current_time - session.last_activity) > timeout_duration\n\n    async def _remove_session(self, session_id: str) -> None:\n        \"\"\"\n        Remove a session and cleanup its resources.\n\n        Note: This method should be called while holding the session lock.\n\n        Args:\n            session_id: ID of the session to remove\n        \"\"\"\n        if session_id in self._sessions:\n            session = self._sessions[session_id]\n            
asyncio.create_task(session.cleanup())  # Cleanup session asynchronously\n\n            # Remove from sessions dict (should already have the lock)\n            if session_id in self._sessions:\n                del self._sessions[session_id]\n            logger.debug(f\"Removed session: {session_id}\")\n\n    async def start_cleanup_task(self) -> None:\n        \"\"\"Start the background task for cleaning up expired sessions.\"\"\"\n        if self._cleanup_task is None or self._cleanup_task.done():\n            self._cleanup_task = asyncio.create_task(self._cleanup_loop())\n            logger.info(\"Started session cleanup task\")\n\n    async def stop_cleanup_task(self) -> None:\n        \"\"\"Stop the background cleanup task.\"\"\"\n        if self._cleanup_task and not self._cleanup_task.done():\n            self._cleanup_task.cancel()\n            try:\n                await self._cleanup_task\n            except asyncio.CancelledError:\n                pass\n            logger.info(\"Stopped session cleanup task\")\n\n    async def _cleanup_loop(self) -> None:\n        \"\"\"Background loop to clean up expired sessions.\"\"\"\n        while True:\n            try:\n                await self._cleanup_expired_sessions()\n                await asyncio.sleep(self._cleanup_interval)\n            except asyncio.CancelledError:\n                break\n            except Exception as e:\n                logger.error(f\"Error in cleanup loop: {e}\")\n                await asyncio.sleep(self._cleanup_interval)\n\n    async def _cleanup_expired_sessions(self) -> None:\n        \"\"\"Clean up all expired sessions.\"\"\"\n        async with self._session_lock:\n            expired_sessions = []\n\n            for session_id, session in self._sessions.items():\n                if await self._is_session_expired(session):\n                    expired_sessions.append(session_id)\n\n            for session_id in expired_sessions:\n                await 
self._remove_session(session_id)\n\n            if expired_sessions:\n                logger.info(f\"Cleaned up {len(expired_sessions)} expired sessions\")\n\n    async def cleanup_all(self) -> None:\n        \"\"\"Clean up all sessions and stop the cleanup task.\"\"\"\n        await self.stop_cleanup_task()\n\n        async with self._session_lock:\n            session_ids = list(self._sessions.keys())\n\n            for session_id in session_ids:\n                await self._remove_session(session_id)\n\n        logger.info(\"Cleaned up all sessions\")\n\n    def __repr__(self) -> str:\n        \"\"\"String representation of the SessionManager.\"\"\"\n        return f\"<SessionManager sessions={len(self._sessions)}>\"\n\n\nsession_manager = SessionManager()\n"
  },
  {
    "path": "app/services/tool_call.py",
    "content": "import asyncio\nfrom typing import Dict, Optional\nfrom datetime import datetime, timedelta\nimport threading\nfrom loguru import logger\n\nfrom app.core.config import settings\n\n\nclass ToolCallState:\n    \"\"\"State for a pending tool call.\"\"\"\n\n    def __init__(self, tool_use_id: str, session_id: str):\n        self.tool_use_id = tool_use_id\n        self.session_id = session_id\n        self.created_at = datetime.now()\n        self.message_id: Optional[str] = None\n\n\nclass ToolCallManager:\n    \"\"\"\n    Singleton manager for tool call states.\n    \"\"\"\n\n    _instance: Optional[\"ToolCallManager\"] = None\n    _lock = threading.Lock()\n\n    def __new__(cls):\n        \"\"\"Implement singleton pattern.\"\"\"\n        if cls._instance is None:\n            with cls._lock:\n                if cls._instance is None:\n                    cls._instance = super().__new__(cls)\n        return cls._instance\n\n    def __init__(self):\n        \"\"\"Initialize the ToolCallManager.\"\"\"\n        # Singleton guard: __new__ always returns the same instance, so\n        # prevent __init__ from re-running and wiping pending tool-call\n        # state on repeated ToolCallManager() calls.\n        if getattr(self, \"_initialized\", False):\n            return\n        self._initialized = True\n        self._tool_calls: Dict[str, ToolCallState] = {}\n        self._cleanup_task: Optional[asyncio.Task] = None\n        self._tool_call_timeout = settings.tool_call_timeout\n        self._cleanup_interval = settings.tool_call_cleanup_interval\n\n        logger.info(\n            f\"ToolCallManager initialized with timeout={self._tool_call_timeout}s, \"\n            f\"cleanup_interval={self._cleanup_interval}s\"\n        )\n\n    def register_tool_call(\n        self, tool_use_id: str, session_id: str, message_id: Optional[str] = None\n    ) -> None:\n        \"\"\"\n        Register a new tool call.\n\n        Args:\n            tool_use_id: Unique identifier for the tool use\n            session_id: Session ID associated with this tool call\n            message_id: Optional message ID for tracking\n        \"\"\"\n        tool_call_state = ToolCallState(tool_use_id, session_id)\n        tool_call_state.message_id = message_id\n\n        
self._tool_calls[tool_use_id] = tool_call_state\n\n        logger.info(f\"Registered tool call: {tool_use_id} for session: {session_id}\")\n\n    def get_tool_call(self, tool_use_id: str) -> Optional[ToolCallState]:\n        \"\"\"\n        Get a tool call state by ID.\n\n        Args:\n            tool_use_id: Tool use ID to lookup\n\n        Returns:\n            ToolCallState if found, None otherwise\n        \"\"\"\n        return self._tool_calls.get(tool_use_id)\n\n    def complete_tool_call(self, tool_use_id: str) -> None:\n        \"\"\"\n        Mark a tool call as completed and remove its state if present.\n\n        Args:\n            tool_use_id: Tool use ID to complete\n        \"\"\"\n        tool_call = self._tool_calls.get(tool_use_id)\n        if tool_call:\n            del self._tool_calls[tool_use_id]\n\n        logger.info(f\"Completed tool call: {tool_use_id}\")\n\n    async def start_cleanup_task(self) -> None:\n        \"\"\"Start the background task for cleaning up expired tool calls.\"\"\"\n        if self._cleanup_task is None or self._cleanup_task.done():\n            self._cleanup_task = asyncio.create_task(self._cleanup_loop())\n            logger.info(\"Started tool call cleanup task\")\n\n    async def stop_cleanup_task(self) -> None:\n        \"\"\"Stop the background cleanup task.\"\"\"\n        if self._cleanup_task and not self._cleanup_task.done():\n            self._cleanup_task.cancel()\n            try:\n                await self._cleanup_task\n            except asyncio.CancelledError:\n                pass\n            logger.info(\"Stopped tool call cleanup task\")\n\n    async def _cleanup_loop(self) -> None:\n        \"\"\"Background loop to clean up expired tool calls.\"\"\"\n        while True:\n            try:\n                self._cleanup_expired_tool_calls()\n                await asyncio.sleep(self._cleanup_interval)\n            except asyncio.CancelledError:\n                break\n            except 
Exception as e:\n                logger.error(f\"Error in tool call cleanup loop: {e}\")\n                await asyncio.sleep(self._cleanup_interval)\n\n    def _cleanup_expired_tool_calls(self) -> None:\n        \"\"\"Clean up all expired tool calls.\"\"\"\n        current_time = datetime.now()\n        timeout_duration = timedelta(seconds=self._tool_call_timeout)\n        expired_tool_calls = []\n\n        for tool_use_id, tool_call in self._tool_calls.items():\n            if (current_time - tool_call.created_at) > timeout_duration:\n                expired_tool_calls.append(tool_use_id)\n\n        for tool_use_id in expired_tool_calls:\n            tool_call = self._tool_calls[tool_use_id]\n            del self._tool_calls[tool_use_id]\n\n        if expired_tool_calls:\n            logger.info(f\"Cleaned up {len(expired_tool_calls)} expired tool calls\")\n\n    async def cleanup_all(self) -> None:\n        \"\"\"Clean up all tool calls and stop the cleanup task.\"\"\"\n        await self.stop_cleanup_task()\n        self._tool_calls.clear()\n        logger.info(\"Cleaned up all tool calls\")\n\n    def __repr__(self) -> str:\n        \"\"\"String representation of the ToolCallManager.\"\"\"\n        return f\"<ToolCallManager tool_calls={len(self._tool_calls)}>\"\n\n\ntool_call_manager = ToolCallManager()\n"
  },
  {
    "path": "app/utils/__init__.py",
    "content": ""
  },
  {
    "path": "app/utils/logger.py",
    "content": "import sys\nfrom pathlib import Path\nfrom loguru import logger\n\nfrom app.core.config import settings\n\n\ndef configure_logger():\n    \"\"\"Initialize the logger with console and optional file output.\"\"\"\n    logger.remove()\n\n    logger.add(\n        sys.stdout,\n        level=settings.log_level.upper(),\n        colorize=True,\n    )\n\n    if settings.log_to_file:\n        log_file = Path(settings.log_file_path)\n        log_file.parent.mkdir(parents=True, exist_ok=True)\n\n        logger.add(\n            settings.log_file_path,\n            level=settings.log_level.upper(),\n            rotation=settings.log_file_rotation,\n            retention=settings.log_file_retention,\n            compression=settings.log_file_compression,\n            enqueue=True,\n            encoding=\"utf-8\",\n        )\n"
  },
  {
    "path": "app/utils/messages.py",
    "content": "import base64\nfrom typing import List, Optional, Tuple\nfrom loguru import logger\n\nfrom app.core.http_client import download_image\nfrom app.core.config import settings\nfrom app.core.exceptions import ExternalImageDownloadError, ExternalImageNotAllowedError\nfrom app.models.claude import (\n    ImageType,\n    InputMessage,\n    Role,\n    ServerToolUseContent,\n    TextContent,\n    ImageContent,\n    ThinkingContent,\n    ToolResultContent,\n    ToolUseContent,\n    URLImageSource,\n    Base64ImageSource,\n)\n\n\nasync def process_messages(\n    messages: List[InputMessage], system: Optional[str | List[TextContent]] = None\n) -> Tuple[str, List[Base64ImageSource]]:\n    if isinstance(system, str):\n        merged_text = system\n    elif system:\n        merged_text = \"\\n\".join(item.text for item in system)\n    else:\n        merged_text = \"\"\n\n    if settings.use_real_roles:\n        human_prefix = f\"\\x08{settings.human_name}: \"\n        assistant_prefix = f\"\\x08{settings.assistant_name}: \"\n    else:\n        human_prefix = f\"{settings.human_name}: \"\n        assistant_prefix = f\"{settings.assistant_name}: \"\n\n    images: List[Base64ImageSource] = []\n    current_role = Role.USER\n\n    for message in messages:\n        if message.role != current_role:\n            if merged_text.endswith(\"\\n\"):\n                merged_text = merged_text[:-1]\n\n            if message.role == Role.USER:\n                merged_text += f\"\\n\\n{human_prefix}\"\n            elif message.role == Role.ASSISTANT:\n                merged_text += f\"\\n\\n{assistant_prefix}\"\n\n        current_role = message.role\n\n        if isinstance(message.content, str):\n            merged_text += f\"{message.content}\\n\"\n        else:\n            for block in message.content:\n                if isinstance(block, TextContent):\n                    merged_text += f\"{block.text}\\n\"\n                elif isinstance(block, ThinkingContent):\n         
           merged_text += f\"<\\x08antml:thinking>\\n{block.thinking}\\n</\\x08antml:thinking>\\n\"\n                elif isinstance(block, ToolUseContent) or isinstance(\n                    block, ServerToolUseContent\n                ):\n                    merged_text += f'<\\x08antml:function_calls>\\n<\\x08antml:invoke name=\"{block.name}\">\\n'\n                    for key, value in block.input.items():\n                        merged_text += f'<\\x08antml:parameter name=\"{key}\">{value}</\\x08antml:parameter>\\n'\n                    merged_text += \"</\\x08antml:invoke>\\n</\\x08antml:function_calls>\\n\"\n                elif isinstance(block, ToolResultContent):\n                    text_content = \"\"\n                    if isinstance(block.content, str):\n                        text_content = f\"{block.content}\"\n                    else:\n                        for content_block in block.content:\n                            if isinstance(content_block, TextContent):\n                                text_content += f\"{content_block.text}\\n\"\n                            elif isinstance(content_block, ImageContent):\n                                if isinstance(content_block.source, Base64ImageSource):\n                                    images.append(content_block.source)\n                                elif isinstance(content_block.source, URLImageSource):\n                                    image_source = await extract_image_from_url(\n                                        content_block.source.url\n                                    )\n                                    if image_source:\n                                        images.append(image_source)\n                                        text_content += \"(image attached)\\n\"\n                            if text_content.endswith(\"\\n\"):\n                                text_content = text_content[:-1]\n                    merged_text += (\n                        
f\"<function_results>{text_content}</function_results>\"\n                    )\n                elif isinstance(block, ImageContent):\n                    if isinstance(block.source, Base64ImageSource):\n                        images.append(block.source)\n                    elif isinstance(block.source, URLImageSource):\n                        image_source = await extract_image_from_url(block.source.url)\n                        if image_source:\n                            images.append(image_source)\n\n        if merged_text.endswith(\"\\n\"):\n            merged_text = merged_text[:-1]\n\n    return (merged_text, images)\n\n\nasync def extract_image_from_url(url: str) -> Optional[Base64ImageSource]:\n    \"\"\"Extract base64 image from data URL or download from external URL.\"\"\"\n\n    if url.startswith(\"data:\"):\n        try:\n            metadata, base64_data = url.split(\",\", 1)\n            media_info = metadata[5:]\n            media_type, encoding = media_info.split(\";\", 1)\n\n            return Base64ImageSource(\n                type=encoding, media_type=media_type, data=base64_data\n            )\n        except Exception:\n            logger.warning(\"Failed to extract image from data URL. 
Skipping image.\")\n            return None\n\n    elif settings.allow_external_images and (\n        url.startswith(\"http://\") or url.startswith(\"https://\")\n    ):\n        try:\n            logger.debug(f\"Downloading external image: {url}\")\n\n            content, content_type = await download_image(\n                url, timeout=settings.request_timeout\n            )\n            base64_data = base64.b64encode(content).decode(\"utf-8\")\n\n            return Base64ImageSource(\n                type=\"base64\", media_type=ImageType(content_type), data=base64_data\n            )\n        except Exception:\n            raise ExternalImageDownloadError(url)\n\n    elif not settings.allow_external_images and (\n        url.startswith(\"http://\") or url.startswith(\"https://\")\n    ):\n        raise ExternalImageNotAllowedError(url)\n    else:\n        logger.warning(f\"Unsupported URL format: {url}, Skipping image.\")\n        return None\n"
  },
  {
    "path": "app/utils/retry.py",
    "content": "from loguru import logger\nfrom tenacity import RetryCallState\n\nfrom app.core.exceptions import AppError\n\n\ndef is_retryable_error(exception):\n    \"\"\"Check if the exception is an AppError with retryable=True\"\"\"\n    return isinstance(exception, AppError) and exception.retryable\n\n\ndef log_before_sleep(retry_state: RetryCallState) -> None:\n    \"\"\"Custom before_sleep callback that safely logs retry attempts.\"\"\"\n    attempt_number = retry_state.attempt_number\n    exception = retry_state.outcome.exception() if retry_state.outcome else None\n\n    if exception:\n        exception_type = type(exception).__name__\n        logger.warning(\n            f\"Retrying {retry_state.fn.__name__} after attempt {attempt_number} \"\n            f\"due to {exception_type}: {str(exception)}\"\n        )\n\n    else:\n        logger.warning(\n            f\"Retrying {retry_state.fn.__name__} after attempt {attempt_number}\"\n        )\n"
  },
  {
    "path": "docker-compose.yml",
    "content": "version: \"3.8\"\n\nservices:\n  clove:\n    build:\n      context: .\n      dockerfile: Dockerfile\n    container_name: clove\n    restart: unless-stopped\n    ports:\n      - \"5201:5201\"\n    volumes:\n      - ./data:/data\n    environment:\n      # Server configuration\n      - HOST=0.0.0.0\n      - PORT=5201\n\n      # Data storage\n      - DATA_FOLDER=/data\n\n      # API Keys (comma-separated)\n      # - API_KEYS=your-api-key-1,your-api-key-2\n      # - ADMIN_API_KEYS=your-admin-key-1,your-admin-key-2\n\n      # Claude cookies (comma-separated)\n      # - COOKIES=your-claude-cookie-1,your-claude-cookie-2\n\n      # Proxy configuration (optional)\n      # - PROXY_URL=http://proxy-server:port\n\n      # Claude URLs (optional, defaults are usually fine)\n      # - CLAUDE_AI_URL=https://claude.ai\n      # - CLAUDE_API_BASEURL=https://api.anthropic.com\n\n      - REQUEST_TIMEOUT=${REQUEST_TIMEOUT:-60}\n\n      # Logging\n      - LOG_LEVEL=INFO\n      - LOG_TO_FILE=true\n      - LOG_FILE_PATH=/data/logs/app.log\n\nvolumes:\n  data:\n    driver: local\n"
  },
  {
    "path": "pyproject.toml",
    "content": "[project]\nname = \"clove-proxy\"\nversion = \"0.3.1\"\ndescription = \"A Claude.ai reverse proxy\"\nreadme = \"README.md\"\nrequires-python = \">=3.11\"\nlicense = {text = \"MIT\"}\nauthors = [\n    {name = \"mirrorange\", email = \"orange@freesia.ink\"},\n]\nkeywords = [\"claude\", \"ai\", \"proxy\", \"fastapi\"]\nclassifiers = [\n    \"Development Status :: 4 - Beta\",\n    \"Intended Audience :: Education\",\n    \"License :: OSI Approved :: MIT License\",\n    \"Programming Language :: Python :: 3\",\n    \"Programming Language :: Python :: 3.11\",\n    \"Programming Language :: Python :: 3.12\",\n    \"Programming Language :: Python :: 3.13\",\n    \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\ndependencies = [\n    \"fastapi>=0.115.14\",\n    \"httpx>=0.28.1\",\n    \"json5>=0.12.0\",\n    \"loguru>=0.7.3\",\n    \"pydantic>=2.11.7\",\n    \"pydantic-settings>=2.10.1\",\n    \"tenacity>=9.1.2\",\n    \"tiktoken>=0.9.0\",\n    \"uvicorn>=0.35.0\",\n]\n\n[project.urls]\n\"Homepage\" = \"https://github.com/mirrorange/clove\"\n\"Bug Tracker\" = \"https://github.com/mirrorange/clove/issues\"\n\"Documentation\" = \"https://github.com/mirrorange/clove#readme\"\n\n[project.scripts]\nclove = \"app.main:main\"\n\n[project.optional-dependencies]\ncurl = [\n    \"curl-cffi>=0.11.4\",\n]\nrnet = [\n    \"rnet>=3.0.0rc14\",\n]\ndev = [\n    \"build>=1.0.0\",\n    \"ruff>=0.12.2\",\n]\n\n[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[tool.hatch.build]\ninclude = [\n    \"app/**/*\",\n    \"README.md\",\n    \"LICENSE\",\n]\nexclude = [\n    \"app/**/__pycache__\",\n    \"app/**/*.pyc\",\n    \"app/**/*.pyo\",\n    \"app/**/test_*.py\",\n    \"app/**/*_test.py\",\n]\n\n[tool.hatch.build.targets.wheel]\npackages = [\"app\"]\n\n[tool.hatch.build.targets.wheel.force-include]\n\"app/static\" = \"app/static\"\n\"app/locales\" = \"app/locales\"\n"
  },
  {
    "path": "scripts/build_wheel.py",
    "content": "#!/usr/bin/env python3\n\"\"\"Build script for Clove - builds frontend and creates Python wheel.\"\"\"\n\nimport argparse\nimport shutil\nimport subprocess\nimport sys\nfrom pathlib import Path\n\n\ndef run_command(cmd, cwd=None, check=True):\n    \"\"\"Run a shell command and return the result.\"\"\"\n    print(f\"Running: {' '.join(cmd)}\")\n    result = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True)\n    if check and result.returncode != 0:\n        print(f\"Error: {result.stderr}\")\n        sys.exit(1)\n    return result\n\n\ndef clean_directories():\n    \"\"\"Clean build directories.\"\"\"\n    print(\"\\n📦 Cleaning build directories...\")\n    dirs_to_clean = [\"dist\", \"build\", \"app.egg-info\", \"clove.egg-info\"]\n    for dir_name in dirs_to_clean:\n        if Path(dir_name).exists():\n            shutil.rmtree(dir_name)\n            print(f\"  ✓ Removed {dir_name}\")\n\n\ndef check_node_installed():\n    \"\"\"Check if Node.js is installed.\"\"\"\n    try:\n        result = run_command([\"node\", \"--version\"], check=False)\n        if result.returncode == 0:\n            print(f\"  ✓ Node.js {result.stdout.strip()} detected\")\n            return True\n    except FileNotFoundError:\n        pass\n\n    print(\"  ✗ Node.js not found. Please install Node.js to build the frontend.\")\n    return False\n\n\ndef check_pnpm_installed():\n    \"\"\"Check if pnpm is installed.\"\"\"\n    try:\n        result = run_command([\"pnpm\", \"--version\"], check=False)\n        if result.returncode == 0:\n            print(f\"  ✓ pnpm {result.stdout.strip()} detected\")\n            return True\n    except FileNotFoundError:\n        pass\n\n    print(\"  ✗ pnpm not found. 
Installing pnpm...\")\n    run_command([\"npm\", \"install\", \"-g\", \"pnpm\"])\n    return True\n\n\ndef build_frontend():\n    \"\"\"Build the frontend application.\"\"\"\n    print(\"\\n🎨 Building frontend...\")\n\n    front_dir = Path(\"front\")\n    if not front_dir.exists():\n        print(\"  ✗ Frontend directory not found\")\n        return False\n\n    if not check_node_installed():\n        return False\n\n    if not check_pnpm_installed():\n        return False\n\n    if not (front_dir / \"node_modules\").exists():\n        print(\"  📦 Installing frontend dependencies...\")\n        run_command([\"pnpm\", \"install\"], cwd=front_dir)\n\n    print(\"  🔨 Building frontend assets...\")\n    run_command([\"pnpm\", \"run\", \"build\"], cwd=front_dir)\n\n    print(\"  📂 Copying built files to app/static...\")\n    static_dir = Path(\"app/static\")\n\n    if static_dir.exists():\n        shutil.rmtree(static_dir)\n\n    shutil.copytree(front_dir / \"dist\", static_dir)\n    print(\"  ✓ Frontend build complete\")\n\n    return True\n\n\ndef build_wheel():\n    \"\"\"Build the Python wheel.\"\"\"\n    print(\"\\n🐍 Building Python wheel...\")\n\n    try:\n        import build\n    except ImportError:\n        print(\"  📦 Installing build tool...\")\n        run_command([sys.executable, \"-m\", \"pip\", \"install\", \"build\"])\n\n    run_command([sys.executable, \"-m\", \"build\", \"--wheel\"])\n\n    dist_dir = Path(\"dist\")\n    if dist_dir.exists():\n        wheels = list(dist_dir.glob(\"*.whl\"))\n        if wheels:\n            print(f\"  ✓ Created wheel: {wheels[0].name}\")\n            return True\n\n    print(\"  ✗ Failed to create wheel\")\n    return False\n\n\ndef parse_args():\n    \"\"\"Parse command line arguments.\"\"\"\n    parser = argparse.ArgumentParser(\n        description=\"Build script for Clove - builds frontend and creates Python wheel.\"\n    )\n    parser.add_argument(\n        \"--skip-frontend\",\n        action=\"store_true\",\n     
   help=\"Skip frontend build and only build the Python wheel\",\n    )\n    parser.add_argument(\n        \"--no-clean\",\n        action=\"store_true\",\n        help=\"Skip cleaning build directories before building\",\n    )\n    return parser.parse_args()\n\n\ndef main():\n    \"\"\"Main build process.\"\"\"\n    args = parse_args()\n\n    print(\"🚀 Building Clove...\")\n\n    if not args.no_clean:\n        clean_directories()\n\n    if args.skip_frontend:\n        print(\"\\n⏭️  Frontend build skipped (--skip-frontend specified)\")\n        if not Path(\"app/static\").exists():\n            print(\n                \"⚠️  No static files found. The wheel will be built without frontend assets.\"\n            )\n            print(\n                \"   You may need to build the frontend separately or copy static files manually.\"\n            )\n    else:\n        frontend_built = build_frontend()\n        if not frontend_built:\n            print(\"\\n⚠️  Frontend build skipped. Using existing static files.\")\n            if not Path(\"app/static\").exists():\n                print(\n                    \"❌ No static files found. Please build frontend manually or ensure app/static exists.\"\n                )\n                sys.exit(1)\n\n    if build_wheel():\n        print(\"\\n✅ Build complete!\")\n        print(\"\\n📦 Installation instructions:\")\n        print(\"  1. Install the wheel:\")\n        print(\"     pip install dist/*.whl\")\n        print(\"  2. Run Clove:\")\n        print(\"     clove\")\n        print(\"\\n📝 Note: You can also install in development mode:\")\n        print(\"     pip install -e .\")\n    else:\n        print(\"\\n❌ Build failed!\")\n        sys.exit(1)\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "tests/test_claude_request_models.py",
    "content": "import unittest\n\nfrom app.models.claude import MessagesAPIRequest\n\n\nclass MessagesAPIRequestToolParsingTests(unittest.TestCase):\n    def test_accepts_custom_tool_payload_without_top_level_input_schema(self) -> None:\n        request = MessagesAPIRequest.model_validate(\n            {\n                \"model\": \"claude-opus-4-20250514\",\n                \"max_tokens\": 1024,\n                \"messages\": [{\"role\": \"user\", \"content\": \"Search for the latest CNY USD rate\"}],\n                \"tools\": [\n                    {\n                        \"type\": \"custom\",\n                        \"name\": \"WebSearch\",\n                        \"custom\": {\n                            \"description\": \"Search the web for public information\",\n                            \"input_schema\": {\n                                \"type\": \"object\",\n                                \"properties\": {\n                                    \"query\": {\"type\": \"string\"},\n                                },\n                                \"required\": [\"query\"],\n                            },\n                        },\n                    }\n                ],\n            }\n        )\n\n        self.assertEqual(request.tools[0].name, \"WebSearch\")\n\n    def test_accepts_server_web_search_tool_without_input_schema(self) -> None:\n        request = MessagesAPIRequest.model_validate(\n            {\n                \"model\": \"claude-opus-4-20250514\",\n                \"max_tokens\": 1024,\n                \"messages\": [{\"role\": \"user\", \"content\": \"Search for the latest CNY USD rate\"}],\n                \"tools\": [\n                    {\n                        \"type\": \"web_search_20250305\",\n                        \"name\": \"web_search\",\n                        \"max_uses\": 5,\n                    }\n                ],\n            }\n        )\n\n        self.assertEqual(request.tools[0].name, 
\"web_search\")\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  }
]