Showing preview only (2,425K chars total). Download the full file or copy to clipboard to get everything.
Repository: pingcap/tidb.ai
Branch: main
Commit: c4cb19d8fa20
Files: 891
Total size: 2.1 MB
Directory structure:
gitextract_vw5kjonn/
├── .dockerignore
├── .github/
│ ├── actions/
│ │ └── decide/
│ │ ├── .gitignore
│ │ ├── action.yml
│ │ ├── index.js
│ │ └── package.json
│ └── workflows/
│ ├── backend-test.yml
│ ├── deploy.yml
│ ├── regression.yml
│ ├── release.yml
│ └── verify.yml
├── .gitignore
├── CONTRIBUTING.md
├── LICENSE.txt
├── README.md
├── backend/
│ ├── .dockerignore
│ ├── .gitignore
│ ├── .pre-commit-config.yaml
│ ├── .python-version
│ ├── Dockerfile
│ ├── Makefile
│ ├── README.md
│ ├── alembic.ini
│ ├── app/
│ │ ├── __init__.py
│ │ ├── alembic/
│ │ │ ├── env.py
│ │ │ ├── script.py.mako
│ │ │ └── versions/
│ │ │ ├── 00534dc350db_.py
│ │ │ ├── 041fbef26e3a_.py
│ │ │ ├── 04947f9684ab_public_chat_engine.py
│ │ │ ├── 04d4f05116ed_.py
│ │ │ ├── 04d81be446c3_.py
│ │ │ ├── 10f36e8a25c4_.py
│ │ │ ├── 197bc8be72d1_.py
│ │ │ ├── 211f3c5aa125_chunking_settings.py
│ │ │ ├── 27a6723b767a_.py
│ │ │ ├── 2adc0b597dcd_int_enum_type.py
│ │ │ ├── 2fc10c21bf88_.py
│ │ │ ├── 749767db5505_add_recommend_questions.py
│ │ │ ├── 8093333c0d87_.py
│ │ │ ├── 830fd9c44f39_.py
│ │ │ ├── 94b198e20946_.py
│ │ │ ├── a54f966436ce_evaluation.py
│ │ │ ├── a8c79553c9f6_.py
│ │ │ ├── ac6e4d58580d_.py
│ │ │ ├── bd17a4ebccc5_.py
│ │ │ ├── c7f016a904c1_.py
│ │ │ ├── d2ad44deab20_multiple_kb.py
│ │ │ ├── dfee070b8abd_.py
│ │ │ ├── e32f1e546eec_.py
│ │ │ └── eb0b85608c0a_.py
│ │ ├── api/
│ │ │ ├── __init__.py
│ │ │ ├── admin_routes/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chat/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── chat_engine.py
│ │ │ │ ├── document/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── embedding_model/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── evaluation/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── evaluation_dataset.py
│ │ │ │ │ ├── evaluation_task.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── tools.py
│ │ │ │ ├── feedback.py
│ │ │ │ ├── knowledge_base/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── chunk/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── data_source/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── document/
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── graph/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── knowledge/
│ │ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ │ └── routes.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── langfuse.py
│ │ │ │ ├── legacy_retrieve.py
│ │ │ │ ├── llm/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── models.py
│ │ │ │ ├── reranker_model/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── semantic_cache.py
│ │ │ │ ├── site_setting.py
│ │ │ │ ├── stats.py
│ │ │ │ ├── upload.py
│ │ │ │ └── user.py
│ │ │ ├── deps.py
│ │ │ ├── main.py
│ │ │ └── routes/
│ │ │ ├── __init__.py
│ │ │ ├── api_key.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── document.py
│ │ │ ├── feedback.py
│ │ │ ├── index.py
│ │ │ ├── models.py
│ │ │ ├── retrieve/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── models.py
│ │ │ │ └── routes.py
│ │ │ └── user.py
│ │ ├── api_server.py
│ │ ├── auth/
│ │ │ ├── api_keys.py
│ │ │ ├── db.py
│ │ │ ├── schemas.py
│ │ │ └── users.py
│ │ ├── celery.py
│ │ ├── core/
│ │ │ ├── config.py
│ │ │ └── db.py
│ │ ├── evaluation/
│ │ │ ├── evals.py
│ │ │ └── evaluators/
│ │ │ ├── __init__.py
│ │ │ ├── e2e_rag_evaluator.py
│ │ │ ├── language_detector.py
│ │ │ └── toxicity.py
│ │ ├── exceptions.py
│ │ ├── experiments/
│ │ │ ├── sql_extraction.py
│ │ │ └── sql_sample_gen.py
│ │ ├── file_storage/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── local.py
│ │ ├── logger.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── api_key.py
│ │ │ ├── auth.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── chat_message.py
│ │ │ ├── chunk.py
│ │ │ ├── data_source.py
│ │ │ ├── document.py
│ │ │ ├── embed_model.py
│ │ │ ├── entity.py
│ │ │ ├── evaluation_dataset.py
│ │ │ ├── evaluation_task.py
│ │ │ ├── feedback.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── knowledge_base_scoped/
│ │ │ │ ├── __init__.py
│ │ │ │ └── table_naming.py
│ │ │ ├── llm.py
│ │ │ ├── recommend_question.py
│ │ │ ├── relationship.py
│ │ │ ├── reranker_model.py
│ │ │ ├── semantic_cache.py
│ │ │ ├── site_setting.py
│ │ │ ├── staff_action_log.py
│ │ │ └── upload.py
│ │ ├── rag/
│ │ │ ├── __init__.py
│ │ │ ├── build_index.py
│ │ │ ├── chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chat_flow.py
│ │ │ │ ├── chat_service.py
│ │ │ │ ├── config.py
│ │ │ │ ├── retrieve/
│ │ │ │ │ └── retrieve_flow.py
│ │ │ │ └── stream_protocol.py
│ │ │ ├── datasource/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── consts.py
│ │ │ │ ├── file.py
│ │ │ │ ├── web_base.py
│ │ │ │ ├── web_single_page.py
│ │ │ │ └── web_sitemap.py
│ │ │ ├── default_prompt.py
│ │ │ ├── embeddings/
│ │ │ │ ├── local/
│ │ │ │ │ └── local_embedding.py
│ │ │ │ ├── open_like/
│ │ │ │ │ └── openai_like_embedding.py
│ │ │ │ ├── provider.py
│ │ │ │ └── resolver.py
│ │ │ ├── indices/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── knowledge_graph/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── extractor.py
│ │ │ │ │ ├── graph_store/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── helpers.py
│ │ │ │ │ │ ├── schema.py
│ │ │ │ │ │ ├── tidb_graph_editor.py
│ │ │ │ │ │ └── tidb_graph_store.py
│ │ │ │ │ └── schema.py
│ │ │ │ └── vector_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── vector_store/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tidb_vector_store.py
│ │ │ ├── knowledge_base/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── config.py
│ │ │ │ ├── index_store.py
│ │ │ │ └── schema.py
│ │ │ ├── llms/
│ │ │ │ ├── dspy.py
│ │ │ │ ├── provider.py
│ │ │ │ └── resolver.py
│ │ │ ├── node_parser/
│ │ │ │ ├── __init__.py
│ │ │ │ └── file/
│ │ │ │ └── markdown.py
│ │ │ ├── postprocessors/
│ │ │ │ ├── __init__.py
│ │ │ │ └── metadata_post_filter.py
│ │ │ ├── query_dispatcher.py
│ │ │ ├── question_gen/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── helpers.py
│ │ │ │ └── query_decomposer.py
│ │ │ ├── rerankers/
│ │ │ │ ├── baisheng/
│ │ │ │ │ └── baisheng_reranker.py
│ │ │ │ ├── local/
│ │ │ │ │ └── local_reranker.py
│ │ │ │ ├── provider.py
│ │ │ │ ├── resolver.py
│ │ │ │ └── vllm/
│ │ │ │ └── vllm_reranker.py
│ │ │ ├── retrievers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chunk/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── fusion_retriever.py
│ │ │ │ │ ├── helpers.py
│ │ │ │ │ ├── schema.py
│ │ │ │ │ └── simple_retriever.py
│ │ │ │ ├── knowledge_graph/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── fusion_retriever.py
│ │ │ │ │ ├── schema.py
│ │ │ │ │ └── simple_retriever.py
│ │ │ │ └── multiple_knowledge_base.py
│ │ │ ├── semantic_cache/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── types.py
│ │ │ └── utils.py
│ │ ├── repositories/
│ │ │ ├── __init__.py
│ │ │ ├── base_repo.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── chunk.py
│ │ │ ├── data_source.py
│ │ │ ├── document.py
│ │ │ ├── embedding_model.py
│ │ │ ├── feedback.py
│ │ │ ├── graph.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── llm.py
│ │ │ ├── reranker_model.py
│ │ │ ├── staff_action_log.py
│ │ │ └── user.py
│ │ ├── site_settings/
│ │ │ ├── __init__.py
│ │ │ ├── default.py
│ │ │ ├── default_settings.yml
│ │ │ └── types.py
│ │ ├── staff_action/
│ │ │ └── __init__.py
│ │ ├── tasks/
│ │ │ ├── __init__.py
│ │ │ ├── build_index.py
│ │ │ ├── evaluate.py
│ │ │ └── knowledge_base.py
│ │ ├── types.py
│ │ └── utils/
│ │ ├── aes.py
│ │ ├── namespace.py
│ │ ├── singleflight_cache.py
│ │ ├── tracing.py
│ │ └── uuid6.py
│ ├── bootstrap.py
│ ├── dspy_compiled_program/
│ │ └── decompose_query/
│ │ ├── demos.json
│ │ └── program.json
│ ├── dspy_program.py
│ ├── local_embedding_reranker/
│ │ ├── .dockerignore
│ │ ├── Dockerfile
│ │ ├── main.py
│ │ └── requirements.txt
│ ├── main.py
│ ├── prestart.sh
│ ├── pyproject.toml
│ ├── supervisord.conf
│ └── tests/
│ ├── __init__.py
│ ├── conftest.py
│ ├── test_dynamic_models.py
│ └── test_llms.py
├── core/
│ ├── .cursor/
│ │ └── rules/
│ │ └── code-style.mdc
│ ├── .gitignore
│ ├── .python-version
│ ├── Makefile
│ ├── README.md
│ ├── autoflow/
│ │ ├── __init__.py
│ │ ├── chunkers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── helper.py
│ │ │ └── text.py
│ │ ├── configs/
│ │ │ ├── __init__.py
│ │ │ ├── chunkers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── text.py
│ │ │ ├── db.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── main.py
│ │ │ └── models/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── common.py
│ │ │ │ ├── jina_ai.py
│ │ │ │ └── openai.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── common.py
│ │ │ │ └── openai.py
│ │ │ ├── manager.py
│ │ │ ├── providers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── jinaai.py
│ │ │ │ └── openai.py
│ │ │ └── rerankers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── common.py
│ │ │ └── jina_ai.py
│ │ ├── data_types.py
│ │ ├── db.py
│ │ ├── knowledge_base/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── prompts.py
│ │ ├── knowledge_graph/
│ │ │ ├── __init__.py
│ │ │ ├── extractors/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── simple.py
│ │ │ ├── index.py
│ │ │ ├── programs/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_graph.py
│ │ │ │ ├── extract_covariates.py
│ │ │ │ └── extract_graph.py
│ │ │ ├── retrievers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── simple.py
│ │ │ │ └── weighted.py
│ │ │ └── types.py
│ │ ├── loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── helper.py
│ │ │ ├── markdown.py
│ │ │ ├── pdf.py
│ │ │ └── webpage.py
│ │ ├── main.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── embedding_models/
│ │ │ │ ├── __init__.py
│ │ │ │ └── litellm.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── dspy.py
│ │ │ │ └── litellm.py
│ │ │ ├── manager.py
│ │ │ ├── provider.py
│ │ │ └── rerank_models/
│ │ │ ├── __init__.py
│ │ │ └── litellm.py
│ │ ├── orms/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── py.typed
│ │ ├── storage/
│ │ │ ├── __init__.py
│ │ │ ├── doc_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── tidb_doc_store.py
│ │ │ │ └── types.py
│ │ │ ├── graph_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── tidb_graph_store.py
│ │ │ │ └── types.py
│ │ │ └── types.py
│ │ ├── types.py
│ │ └── utils/
│ │ ├── hash.py
│ │ ├── uuid6.py
│ │ └── vector.py
│ ├── examples/
│ │ ├── README.md
│ │ ├── quickstart/
│ │ │ ├── fixtures/
│ │ │ │ ├── analyze-slow-queries.md
│ │ │ │ └── tidb-overview.md
│ │ │ └── quickstart.ipynb
│ │ └── streamlit/
│ │ ├── README.md
│ │ ├── build-knowledge-search-with-autoflow-and-streamlit.py
│ │ └── reqs.txt
│ ├── experimental/
│ │ ├── README.md
│ │ └── kg_extraction/
│ │ └── extract_graph.ipynb
│ ├── pyproject.toml
│ └── tests/
│ ├── __init__.py
│ ├── conftest.py
│ ├── fixtures/
│ │ ├── analyze-slow-queries.md
│ │ └── tidb-overview.md
│ ├── knowledge_base/
│ │ ├── __init__.py
│ │ ├── test_kb_with_namespace.py
│ │ └── test_kb_without_namespace.py
│ ├── knowledge_graph/
│ │ ├── programs/
│ │ │ └── test_extract_graph.py
│ │ └── test_kg_extractor.py
│ ├── models/
│ │ └── test_model_manager.py
│ └── storage/
│ ├── __init__.py
│ ├── doc_store/
│ │ └── test_tidb_doc_store.py
│ └── graph_store/
│ ├── __init__.py
│ └── test_tidb_graph_store.py
├── docker-compose-cn.yml
├── docker-compose.dev.yml
├── docker-compose.yml
├── docs/
│ ├── .gitignore
│ ├── mdx-components.ts
│ ├── next-sitemap.config.js
│ ├── next.config.mjs
│ ├── package.json
│ ├── src/
│ │ ├── app/
│ │ │ ├── [[...mdxPath]]/
│ │ │ │ └── page.jsx
│ │ │ ├── _app.tsx
│ │ │ ├── _ignored/
│ │ │ │ ├── _meta.js
│ │ │ │ └── page.mdx
│ │ │ ├── _meta.ts
│ │ │ ├── globals.css
│ │ │ └── layout.jsx
│ │ └── content/
│ │ ├── README.md
│ │ ├── _meta.ts
│ │ ├── chat-engine.mdx
│ │ ├── deploy-with-docker.mdx
│ │ ├── embedding-model.mdx
│ │ ├── evaluation.mdx
│ │ ├── faq.mdx
│ │ ├── index.mdx
│ │ ├── javascript.mdx
│ │ ├── knowledge-base.mdx
│ │ ├── llm.mdx
│ │ ├── quick-start.mdx
│ │ ├── releases/
│ │ │ ├── _meta.ts
│ │ │ ├── index.mdx
│ │ │ ├── v0.1.0.md
│ │ │ ├── v0.2.0.md
│ │ │ ├── v0.3.0.md
│ │ │ └── v0.4.0.md
│ │ ├── requirements.mdx
│ │ ├── reranker-model.mdx
│ │ └── resources.mdx
│ └── tsconfig.json
├── e2e/
│ ├── .gitignore
│ ├── README.md
│ ├── deploy-test-result.sh
│ ├── docker-compose.yml
│ ├── global.setup.ts
│ ├── package.json
│ ├── playwright.config.ts
│ ├── prepare-test.sh
│ ├── res/
│ │ └── sample-evaluation-dataset.csv
│ ├── start-test.sh
│ ├── test-html/
│ │ ├── example-doc-1.html
│ │ ├── example-doc-2.html
│ │ ├── example-sitemap.xml
│ │ ├── widget-controlled.html
│ │ └── widget.html
│ ├── tests/
│ │ ├── api-keys.spec.ts
│ │ ├── api.spec.ts
│ │ ├── bootstrap.ts
│ │ ├── chat-engine.spec.ts
│ │ ├── chat.spec.ts
│ │ ├── datasource.spec.ts
│ │ ├── evaluation.spec.ts
│ │ ├── knowledge-base.spec.ts
│ │ ├── site-settings.spec.ts
│ │ └── widget.spec.ts
│ ├── utils/
│ │ ├── chat.ts
│ │ ├── forms.ts
│ │ └── login.ts
│ └── vercel.json
└── frontend/
├── .gitignore
├── .nvmrc
├── .prettierignore
├── Dockerfile
├── app/
│ ├── .eslintrc.json
│ ├── .gitignore
│ ├── .storybook/
│ │ ├── main.ts
│ │ └── preview.ts
│ ├── README.md
│ ├── components.json
│ ├── jest.config.ts
│ ├── jest.polyfills.js
│ ├── next-sitemap.config.js
│ ├── next.config.ts
│ ├── notice.md
│ ├── package.json
│ ├── postcss.config.mjs
│ ├── public/
│ │ └── chats.mock.txt
│ ├── src/
│ │ ├── api/
│ │ │ ├── .gitignore
│ │ │ ├── api-keys.ts
│ │ │ ├── auth.ts
│ │ │ ├── chat-engines.ts
│ │ │ ├── chats.ts
│ │ │ ├── commons.ts
│ │ │ ├── datasources.ts
│ │ │ ├── documents.ts
│ │ │ ├── embedding-models.ts
│ │ │ ├── evaluations.ts
│ │ │ ├── feedbacks.ts
│ │ │ ├── graph.ts
│ │ │ ├── knowledge-base.ts
│ │ │ ├── llms.ts
│ │ │ ├── providers.ts
│ │ │ ├── rag.ts
│ │ │ ├── rerankers.ts
│ │ │ ├── site-settings.ts
│ │ │ ├── stats.ts
│ │ │ ├── system.ts
│ │ │ └── users.ts
│ │ ├── app/
│ │ │ ├── (experimental)/
│ │ │ │ └── experimental-features/
│ │ │ │ └── route.ts
│ │ │ ├── (main)/
│ │ │ │ ├── (.)auth/
│ │ │ │ │ └── login/
│ │ │ │ │ ├── loading.tsx
│ │ │ │ │ ├── page.client.tsx
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── (admin)/
│ │ │ │ │ ├── chat-engines/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── new/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── embedding-models/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── evaluation/
│ │ │ │ │ │ ├── datasets/
│ │ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ │ ├── items/
│ │ │ │ │ │ │ │ │ ├── [itemId]/
│ │ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ │ └── new/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── not-found.tsx
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── page.tsx
│ │ │ │ │ │ └── tasks/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ ├── not-found.tsx
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── feedbacks/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── knowledge-bases/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ ├── (special)/
│ │ │ │ │ │ │ │ ├── data-sources/
│ │ │ │ │ │ │ │ │ └── new/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ └── documents/
│ │ │ │ │ │ │ │ └── [documentId]/
│ │ │ │ │ │ │ │ └── chunks/
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ ├── (tabs)/
│ │ │ │ │ │ │ │ ├── data-sources/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── index-progress/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── knowledge-graph-explorer/
│ │ │ │ │ │ │ │ │ ├── create-synopsis-entity/
│ │ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── layout.tsx
│ │ │ │ │ │ │ │ ├── page.tsx
│ │ │ │ │ │ │ │ ├── settings/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ └── tabs.tsx
│ │ │ │ │ │ │ ├── api.ts
│ │ │ │ │ │ │ └── context.tsx
│ │ │ │ │ │ ├── new/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── layout.tsx
│ │ │ │ │ ├── llms/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── reranker-models/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── site-settings/
│ │ │ │ │ │ ├── custom_js/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── integrations/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── layout.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ └── stats/
│ │ │ │ │ └── trending/
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── (user)/
│ │ │ │ │ ├── api-keys/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── c/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ └── layout.tsx
│ │ │ │ ├── c/
│ │ │ │ │ └── [id]/
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── layout.tsx
│ │ │ │ ├── nav.tsx
│ │ │ │ └── page.tsx
│ │ │ ├── RootProviders.tsx
│ │ │ ├── api/
│ │ │ │ └── [[...fallback_placeholder]]/
│ │ │ │ └── route.ts
│ │ │ ├── auth/
│ │ │ │ └── login/
│ │ │ │ └── page.tsx
│ │ │ ├── chart-theme.css
│ │ │ ├── globals.css
│ │ │ └── layout.tsx
│ │ ├── components/
│ │ │ ├── admin-page-heading.tsx
│ │ │ ├── admin-page-layout.tsx
│ │ │ ├── api-keys/
│ │ │ │ └── CreateApiKeyForm.tsx
│ │ │ ├── auth/
│ │ │ │ └── AuthProvider.tsx
│ │ │ ├── auto-scroll/
│ │ │ │ ├── auto-scroll.stories.tsx
│ │ │ │ ├── auto-scroll.tsx
│ │ │ │ ├── context.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── manual-scroll-voter.tsx
│ │ │ │ ├── use-auto-scroll-voter.ts
│ │ │ │ └── use-request-scroll.ts
│ │ │ ├── branding.tsx
│ │ │ ├── cells/
│ │ │ │ ├── actions.tsx
│ │ │ │ ├── boolean.tsx
│ │ │ │ ├── datetime.tsx
│ │ │ │ ├── error-message.tsx
│ │ │ │ ├── link.tsx
│ │ │ │ ├── metadata.tsx
│ │ │ │ ├── mono.tsx
│ │ │ │ ├── percent.tsx
│ │ │ │ └── reference.tsx
│ │ │ ├── charts/
│ │ │ │ ├── IndexProgressChart.stories.tsx
│ │ │ │ ├── IndexProgressChart.tsx
│ │ │ │ ├── TotalCard.stories.tsx
│ │ │ │ ├── TotalCard.tsx
│ │ │ │ └── TrendsChart.tsx
│ │ │ ├── chat/
│ │ │ │ ├── ask.tsx
│ │ │ │ ├── chat-controller.test.ts
│ │ │ │ ├── chat-controller.ts
│ │ │ │ ├── chat-hooks.tsx
│ │ │ │ ├── chat-message-controller.test.ts
│ │ │ │ ├── chat-message-controller.ts
│ │ │ │ ├── chat-new-dialog.tsx
│ │ │ │ ├── chat-stream-state.ts
│ │ │ │ ├── chat-stream.state.test.ts
│ │ │ │ ├── chats-history.tsx
│ │ │ │ ├── chats-table.tsx
│ │ │ │ ├── conversation-message-groups.scss
│ │ │ │ ├── conversation-message-groups.tsx
│ │ │ │ ├── conversation.test.tsx
│ │ │ │ ├── conversation.tsx
│ │ │ │ ├── debug-info.tsx
│ │ │ │ ├── knowledge-graph-debug-info.tsx
│ │ │ │ ├── message-annotation-history-stackvm.tsx
│ │ │ │ ├── message-annotation-history.tsx
│ │ │ │ ├── message-answer.tsx
│ │ │ │ ├── message-auto-scroll.tsx
│ │ │ │ ├── message-beta-alert.tsx
│ │ │ │ ├── message-content-sources.tsx
│ │ │ │ ├── message-content.test.tsx
│ │ │ │ ├── message-content.tsx
│ │ │ │ ├── message-error.tsx
│ │ │ │ ├── message-feedback.tsx
│ │ │ │ ├── message-input.tsx
│ │ │ │ ├── message-operations.tsx
│ │ │ │ ├── message-recommend-questions.tsx
│ │ │ │ ├── message-section.tsx
│ │ │ │ ├── testutils.ts
│ │ │ │ ├── use-ask.ts
│ │ │ │ ├── use-message-feedback.ts
│ │ │ │ └── utils.ts
│ │ │ ├── chat-engine/
│ │ │ │ ├── chat-engines-table.tsx
│ │ │ │ ├── create-chat-engine-form.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── kb-list-select.tsx
│ │ │ │ └── update-chat-engine-form.tsx
│ │ │ ├── code-theme.scss
│ │ │ ├── config-viewer.tsx
│ │ │ ├── copy-button.tsx
│ │ │ ├── dangerous-action-button.tsx
│ │ │ ├── data-table-heading.tsx
│ │ │ ├── data-table-remote.tsx
│ │ │ ├── data-table.tsx
│ │ │ ├── datasource/
│ │ │ │ ├── create-datasource-form.tsx
│ │ │ │ ├── datasource-card.tsx
│ │ │ │ ├── datasource-create-option.tsx
│ │ │ │ ├── no-datasource-placeholder.tsx
│ │ │ │ └── update-datasource-form.tsx
│ │ │ ├── date-format.tsx
│ │ │ ├── date-range-picker.tsx
│ │ │ ├── diff-seconds.tsx
│ │ │ ├── document-viewer.tsx
│ │ │ ├── documents/
│ │ │ │ ├── documents-table-filters.tsx
│ │ │ │ └── documents-table.tsx
│ │ │ ├── embedding-models/
│ │ │ │ ├── CreateEmbeddingModelForm.tsx
│ │ │ │ ├── EmbeddingModelInfo.tsx
│ │ │ │ ├── EmbeddingModelsTable.tsx
│ │ │ │ ├── UpdateEmbeddingModelForm.tsx
│ │ │ │ └── hooks.tsx
│ │ │ ├── error-card.tsx
│ │ │ ├── evaluations/
│ │ │ │ ├── cells.tsx
│ │ │ │ ├── create-evaluation-dataset-form.stories.tsx
│ │ │ │ ├── create-evaluation-dataset-form.tsx
│ │ │ │ ├── create-evaluation-dataset-item-form.stories.tsx
│ │ │ │ ├── create-evaluation-dataset-item-form.tsx
│ │ │ │ ├── create-evaluation-task-form.stories.tsx
│ │ │ │ ├── create-evaluation-task-form.tsx
│ │ │ │ ├── evaluation-dataset-info.tsx
│ │ │ │ ├── evaluation-dataset-items-table.tsx
│ │ │ │ ├── evaluation-datasets-table.tsx
│ │ │ │ ├── evaluation-task-info.stories.tsx
│ │ │ │ ├── evaluation-task-info.tsx
│ │ │ │ ├── evaluation-task-items-table.tsx
│ │ │ │ ├── evaluation-tasks-table.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── keyword-filter-toolbar.tsx
│ │ │ │ └── update-evaluation-dataset-item-form.tsx
│ │ │ ├── feedbacks/
│ │ │ │ └── feedbacks-table.tsx
│ │ │ ├── form/
│ │ │ │ ├── biz.tsx
│ │ │ │ ├── control-widget.tsx
│ │ │ │ ├── create-entity-form.tsx
│ │ │ │ ├── field-layout.tsx
│ │ │ │ ├── root-error.tsx
│ │ │ │ ├── utils.ts
│ │ │ │ └── widgets/
│ │ │ │ ├── CodeInput.tsx
│ │ │ │ ├── FileInput.tsx
│ │ │ │ ├── FilesInput.tsx
│ │ │ │ └── PromptInput.tsx
│ │ │ ├── form-sections.tsx
│ │ │ ├── graph/
│ │ │ │ ├── GraphCreateEntity.tsx
│ │ │ │ ├── GraphEditor.tsx
│ │ │ │ ├── action.ts
│ │ │ │ ├── components/
│ │ │ │ │ ├── EditingButton.tsx
│ │ │ │ │ ├── EntitiesTable.tsx
│ │ │ │ │ ├── InputField.tsx
│ │ │ │ │ ├── JsonEditor.tsx
│ │ │ │ │ ├── JsonField.tsx
│ │ │ │ │ ├── LinkDetails.tsx
│ │ │ │ │ ├── NetworkCanvas.tsx
│ │ │ │ │ ├── NetworkContext.ts
│ │ │ │ │ ├── NetworkViewer.tsx
│ │ │ │ │ ├── NodeDetails.tsx
│ │ │ │ │ ├── SearchEntity.tsx
│ │ │ │ │ ├── SearchEntityById.tsx
│ │ │ │ │ └── TextareaField.tsx
│ │ │ │ ├── index.ts
│ │ │ │ ├── network/
│ │ │ │ │ ├── CanvasNetworkRenderer.ts
│ │ │ │ │ ├── Network.ts
│ │ │ │ │ └── NetworkRendererOptions.ts
│ │ │ │ ├── remote.ts
│ │ │ │ ├── selectEntities.ts
│ │ │ │ ├── useDirtyEntity.ts
│ │ │ │ ├── useDirtyRelationship.ts
│ │ │ │ ├── useNetwork.ts
│ │ │ │ └── utils.ts
│ │ │ ├── gtag-provider.tsx
│ │ │ ├── html-viewer.tsx
│ │ │ ├── icons/
│ │ │ │ └── index.ts
│ │ │ ├── knowledge-base/
│ │ │ │ ├── create-knowledge-base-form.stories.tsx
│ │ │ │ ├── create-knowledge-base-form.tsx
│ │ │ │ ├── document-chunks-table.tsx
│ │ │ │ ├── empty-state.tsx
│ │ │ │ ├── form-index-methods.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── knowledge-base-card.stories.tsx
│ │ │ │ ├── knowledge-base-card.tsx
│ │ │ │ ├── knowledge-base-chunking-config-fields.tsx
│ │ │ │ ├── knowledge-base-index.tsx
│ │ │ │ └── knowledge-base-settings-form.tsx
│ │ │ ├── llm/
│ │ │ │ ├── CreateLLMForm.tsx
│ │ │ │ ├── LLMsTable.tsx
│ │ │ │ ├── LlmInfo.tsx
│ │ │ │ ├── UpdateLLMForm.tsx
│ │ │ │ └── hooks.ts
│ │ │ ├── loader.tsx
│ │ │ ├── managed-dialog-close.tsx
│ │ │ ├── managed-dialog.tsx
│ │ │ ├── managed-panel.tsx
│ │ │ ├── model-component-info.tsx
│ │ │ ├── nextjs/
│ │ │ │ └── NextLink.tsx
│ │ │ ├── option-detail.tsx
│ │ │ ├── portal-provider.tsx
│ │ │ ├── provider-description.tsx
│ │ │ ├── py-viewer.tsx
│ │ │ ├── remark-content/
│ │ │ │ ├── components.tsx
│ │ │ │ ├── context.tsx
│ │ │ │ ├── highlight.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── remark-content.stories.tsx
│ │ │ │ ├── remark-content.tsx
│ │ │ │ └── style.scss
│ │ │ ├── reranker/
│ │ │ │ ├── CreateRerankerForm.tsx
│ │ │ │ ├── RerankerInfo.tsx
│ │ │ │ ├── RerankerModelsTable.tsx
│ │ │ │ ├── UpdateRerankerForm.tsx
│ │ │ │ └── hooks.ts
│ │ │ ├── resource-not-found.tsx
│ │ │ ├── row-checkbox.tsx
│ │ │ ├── secondary-navigator-list.tsx
│ │ │ ├── security-setting-provider.tsx
│ │ │ ├── settings/
│ │ │ │ ├── CustomJsSettings.tsx
│ │ │ │ ├── IntegrationsSettings.tsx
│ │ │ │ ├── LinkArrayField.tsx
│ │ │ │ ├── SettingsField.tsx
│ │ │ │ ├── StringArrayField.tsx
│ │ │ │ ├── WebsiteSettings.tsx
│ │ │ │ └── WidgetSnippet.tsx
│ │ │ ├── settings-form/
│ │ │ │ ├── GeneralSettingsField.tsx
│ │ │ │ ├── GeneralSettingsForm.tsx
│ │ │ │ ├── accessor-helper.ts
│ │ │ │ ├── context.tsx
│ │ │ │ ├── index.ts
│ │ │ │ └── utils.ts
│ │ │ ├── signin.tsx
│ │ │ ├── site-header-actions.tsx
│ │ │ ├── site-header.tsx
│ │ │ ├── site-nav.tsx
│ │ │ ├── system/
│ │ │ │ ├── BootstrapStatusProvider.tsx
│ │ │ │ └── SystemWizardBanner.tsx
│ │ │ ├── theme-toggle.tsx
│ │ │ ├── theme.stories.tsx
│ │ │ ├── themed-style.ts
│ │ │ ├── ui/
│ │ │ │ ├── accordion.tsx
│ │ │ │ ├── alert-dialog.tsx
│ │ │ │ ├── alert.tsx
│ │ │ │ ├── aspect-ratio.tsx
│ │ │ │ ├── avatar.tsx
│ │ │ │ ├── badge.tsx
│ │ │ │ ├── breadcrumb.tsx
│ │ │ │ ├── button.tsx
│ │ │ │ ├── calendar.tsx
│ │ │ │ ├── card.tsx
│ │ │ │ ├── carousel.tsx
│ │ │ │ ├── chart.tsx
│ │ │ │ ├── checkbox.tsx
│ │ │ │ ├── collapsible.tsx
│ │ │ │ ├── command.tsx
│ │ │ │ ├── context-menu.tsx
│ │ │ │ ├── dialog.tsx
│ │ │ │ ├── dot-pattern.tsx
│ │ │ │ ├── drawer.tsx
│ │ │ │ ├── dropdown-menu.tsx
│ │ │ │ ├── form.beta.tsx
│ │ │ │ ├── form.tsx
│ │ │ │ ├── hover-card.tsx
│ │ │ │ ├── input-otp.tsx
│ │ │ │ ├── input.tsx
│ │ │ │ ├── label.tsx
│ │ │ │ ├── menubar.tsx
│ │ │ │ ├── navigation-menu.tsx
│ │ │ │ ├── pagination.tsx
│ │ │ │ ├── popover.tsx
│ │ │ │ ├── progress.tsx
│ │ │ │ ├── radio-group.tsx
│ │ │ │ ├── resizable.tsx
│ │ │ │ ├── scroll-area.tsx
│ │ │ │ ├── select.tsx
│ │ │ │ ├── separator.tsx
│ │ │ │ ├── sheet.tsx
│ │ │ │ ├── sidebar.tsx
│ │ │ │ ├── skeleton.tsx
│ │ │ │ ├── slider.tsx
│ │ │ │ ├── sonner.tsx
│ │ │ │ ├── switch.tsx
│ │ │ │ ├── table.tsx
│ │ │ │ ├── tabs.tsx
│ │ │ │ ├── textarea.tsx
│ │ │ │ ├── toast.tsx
│ │ │ │ ├── toaster.tsx
│ │ │ │ ├── toggle-group.tsx
│ │ │ │ ├── toggle.tsx
│ │ │ │ ├── tooltip.tsx
│ │ │ │ └── use-toast.ts
│ │ │ ├── use-active-theme.ts
│ │ │ ├── use-data-table.ts
│ │ │ ├── use-href.ts
│ │ │ ├── use-latest-ref.tsx
│ │ │ ├── use-search-param.ts
│ │ │ ├── use-size.ts
│ │ │ └── website-setting-provider.tsx
│ │ ├── core/
│ │ │ └── schema/
│ │ │ ├── NOTICE.md
│ │ │ └── settings/
│ │ │ └── security.ts
│ │ ├── experimental/
│ │ │ ├── chat-verify-service/
│ │ │ │ ├── api.mock.ts
│ │ │ │ ├── api.react-server.ts
│ │ │ │ ├── api.tidbai-widget.ts
│ │ │ │ ├── api.ts
│ │ │ │ ├── message-verify-result-markdown.tsx
│ │ │ │ ├── message-verify.stories.tsx
│ │ │ │ └── message-verify.tsx
│ │ │ ├── experimental-features-provider.tsx
│ │ │ └── experimental-features.ts
│ │ ├── hooks/
│ │ │ ├── use-mobile.tsx
│ │ │ └── use-model-provider.ts
│ │ └── lib/
│ │ ├── auth.ts
│ │ ├── buffered-readable-stream.test.ts
│ │ ├── buffered-readable-stream.ts
│ │ ├── errors.ts
│ │ ├── react.ts
│ │ ├── request/
│ │ │ ├── authenticationHeaders.mock.ts
│ │ │ ├── authenticationHeaders.react-server.ts
│ │ │ ├── authenticationHeaders.tidbai-widget.ts
│ │ │ ├── authenticationHeaders.ts
│ │ │ ├── base-url.mock.ts
│ │ │ ├── base-url.react-server.ts
│ │ │ ├── base-url.tidbai-widget.ts
│ │ │ ├── base-url.ts
│ │ │ ├── errors.ts
│ │ │ ├── index.ts
│ │ │ ├── list-all-helper.ts
│ │ │ ├── params.ts
│ │ │ ├── response-handlers.ts
│ │ │ └── url.ts
│ │ ├── stackvm/
│ │ │ ├── core/
│ │ │ │ ├── index.ts
│ │ │ │ ├── instructions/
│ │ │ │ │ ├── index.ts
│ │ │ │ │ ├── instructions.ts
│ │ │ │ │ └── registry.ts
│ │ │ │ ├── model.ts
│ │ │ │ ├── types.ts
│ │ │ │ └── visit.ts
│ │ │ └── index.ts
│ │ ├── strings.ts
│ │ ├── tanstack-form.ts
│ │ ├── typing-utils.ts
│ │ ├── ui-error.tsx
│ │ ├── utils.ts
│ │ ├── zod.test.ts
│ │ └── zod.ts
│ ├── tailwind.config.ts
│ └── tsconfig.json
├── package.json
├── packages/
│ └── widget-react/
│ ├── .eslintrc.cjs
│ ├── .gitignore
│ ├── README.md
│ ├── USAGE.md
│ ├── index.html
│ ├── package.json
│ ├── postcss.config.js
│ ├── src/
│ │ ├── Widget.css
│ │ ├── Widget.tsx
│ │ ├── index.css
│ │ ├── library.tsx
│ │ ├── load-config.ts
│ │ ├── overrides/
│ │ │ ├── README.md
│ │ │ └── components/
│ │ │ ├── code-theme.scss
│ │ │ └── remark-content/
│ │ │ └── style.scss
│ │ ├── prepare-gtag.ts
│ │ └── vite-env.d.ts
│ ├── tailwind.config.ts
│ ├── tsconfig.app.json
│ ├── tsconfig.json
│ ├── tsconfig.node.json
│ └── vite.config.ts
├── patches/
│ ├── @jest__environment@29.7.0.patch
│ └── jest-runtime@29.7.0.patch
└── pnpm-workspace.yaml
================================================
FILE CONTENTS
================================================
================================================
FILE: .dockerignore
================================================
# Frontend
.github
e2e
frontend/Dockerfile
frontend/**/node_modules
frontend/app/.next
frontend/app/.swc
frontend/packages/*/dist
================================================
FILE: .github/actions/decide/.gitignore
================================================
node_modules
================================================
FILE: .github/actions/decide/action.yml
================================================
name: 'Decide action'
description: "See issue https://github.com/pingcap/tidb.ai/issues/314"
inputs:
  pr-e2e-frontend-label-prefix:
    description: "Defaults to 'e2e-frontend:'."
    required: true
    default: "e2e-frontend:"
  pr-e2e-backend-label-prefix:
    # Description previously said 'e2e-frontend:' (copy-paste error);
    # it now matches the actual default below.
    description: "Defaults to 'e2e-backend:'."
    required: true
    default: "e2e-backend:"
outputs:
  should-build-frontend:
    description: Should build frontend
  should-build-backend:
    description: Should build backend
  e2e-frontend:
    description: Frontend docker version
  e2e-backend:
    # Description previously said 'Frontend docker version' (copy-paste error).
    description: Backend docker version
runs:
  using: 'node20'
  main: 'index.js'
================================================
FILE: .github/actions/decide/index.js
================================================
import * as core from '@actions/core';
import { context, } from '@actions/github';
// Label prefixes that mark a PR as pinning a prebuilt image version.
// A label like `e2e-frontend:v1.2.3` skips that side's build and runs
// E2E against the labeled version instead (see run() below).
const FRONTEND_PREFIX = core.getInput('pr-e2e-frontend-label-prefix', { required: true });
const BACKEND_PREFIX = core.getInput('pr-e2e-backend-label-prefix', { required: true });
/**
 * Decide whether to build the frontend/backend docker images and which
 * image versions the E2E suite should run against.
 *
 * Non-PR events always build both images and test the freshly built
 * `sha-<sha>-dev` tags. For pull requests, a label of the form
 * `<prefix><version>` (e.g. `e2e-frontend:v1.2.3`) skips the build for
 * that side and pins the E2E run to the labeled version instead.
 *
 * Emits four outputs: `should-build-frontend`, `should-build-backend`,
 * `e2e-frontend`, `e2e-backend`.
 */
function run() {
  const defaultImageVersion = `sha-${context.sha}-dev`;

  if (context.eventName !== 'pull_request') {
    // Build and run E2E for all other events.
    // TODO: Maybe handle commit message like "feat(frontend-only): Some message"
    for (const side of ['frontend', 'backend']) {
      core.setOutput(`should-build-${side}`, true);
      core.setOutput(`e2e-${side}`, defaultImageVersion);
    }
    return;
  }

  // The frontend and backend decisions are identical except for the
  // label prefix, so share one helper instead of duplicating the branch.
  decideSide('Frontend', FRONTEND_PREFIX, defaultImageVersion);
  decideSide('Backend', BACKEND_PREFIX, defaultImageVersion);
}

/**
 * Emit `should-build-<side>` / `e2e-<side>` outputs for one side.
 *
 * @param {string} sideName Capitalized side name ('Frontend'/'Backend'),
 *   used for both the log line and (lowercased) the output names.
 * @param {string} prefix Label prefix that pins a prebuilt version.
 * @param {string} defaultImageVersion Version to use when no label matches.
 */
function decideSide(sideName, prefix, defaultImageVersion) {
  const side = sideName.toLowerCase();
  const label = findPRLabel((l) => l.name.startsWith(prefix));
  if (label) {
    // A pinning label is present: skip the build, test the pinned version.
    const version = label.name.slice(prefix.length);
    core.setOutput(`should-build-${side}`, false);
    core.setOutput(`e2e-${side}`, version);
    core.info(`E2E ${sideName}: ${version}`);
  } else {
    core.setOutput(`should-build-${side}`, true);
    core.setOutput(`e2e-${side}`, defaultImageVersion);
  }
}
/**
 * Return the first label on the current pull request that satisfies `test`,
 * or `undefined` when none matches. Only meaningful for `pull_request`
 * events, where the webhook payload carries a `pull_request` object.
 */
function findPRLabel(test) {
  const { labels } = context.payload.pull_request;
  for (const label of labels) {
    if (test(label)) {
      return label;
    }
  }
  return undefined;
}
run();
================================================
FILE: .github/actions/decide/package.json
================================================
{
"name": "@tidbai/ci-decide-action",
"version": "1.0.0",
"type": "module",
"module": "index.js",
"devDependencies": {
"@actions/core": "^1.11.1",
"@actions/github": "^6.0.0"
}
}
================================================
FILE: .github/workflows/backend-test.yml
================================================
name: Backend Test
on:
push:
branches:
- main
paths:
- backend/**
pull_request:
branches:
- main
paths:
- backend/**
jobs:
backend-test:
name: Backend Test
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./backend
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Ruff Format
uses: astral-sh/ruff-action@v3
- name: Run Ruff Check
run: ruff check
- name: Run Ruff Format
run: ruff format --check
================================================
FILE: .github/workflows/deploy.yml
================================================
name: Deploy to Production
on:
workflow_dispatch:
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: ssh and deploy
uses: appleboy/ssh-action@v1.0.3
with:
host: ${{ secrets.DEPLOY_HOST }}
username: ${{ secrets.DEPLOY_USERNAME }}
key: ${{ secrets.DEPLOY_SSH_KEY }}
port: ${{ secrets.DEPLOY_PORT }}
script: cd /home/ubuntu/py.tidb.ai-docker && docker compose up -d --force-recreate --pull always
================================================
FILE: .github/workflows/regression.yml
================================================
name: Regression Test
on:
workflow_dispatch:
inputs:
dataset:
description: 'Langfuse dataset to test'
required: true
default: 'regression'
tidbAIChatEngine:
description: 'TiDB AI Chat Engine to test'
required: true
default: 'default'
llmProvider:
description: 'llm provider'
required: true
default: 'openai'
type: choice
options:
- openai
- gemini
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r backend/requirements.lock
- name: Run tests
env:
ENVIRONMENT: "production"
LANGFUSE_HOST: ${{ secrets.LANGFUSE_HOST }}
LANGFUSE_PUBLIC_KEY: ${{ secrets.LANGFUSE_PUBLIC_KEY }}
LANGFUSE_SECRET_KEY: ${{ secrets.LANGFUSE_SECRET_KEY }}
TIDB_AI_CHAT_ENDPOINT: ${{ secrets.TIDB_AI_CHAT_ENDPOINT }}
TIDB_AI_API_KEY: ${{ secrets.TIDB_AI_API_KEY }}
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
TIDB_HOST: "127.0.0.1"
TIDB_USER: "root"
TIDB_PASSWORD: "fake"
TIDB_DATABASE: "test"
run: |
cd backend && python main.py runeval --llm-provider ${{ inputs.llmProvider }} --dataset ${{ inputs.dataset }} --tidb-ai-chat-engine ${{ inputs.tidbAIChatEngine }}
================================================
FILE: .github/workflows/release.yml
================================================
name: Build and Publish
on:
pull_request:
branches:
- main
types:
- opened
- synchronize
- reopened
- labeled
- unlabeled
push:
branches:
- main
tags:
- '*'
workflow_dispatch:
inputs:
tag:
description: "The tag to build the image with. Format *.*.*((beta|rc)*)?"
required: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
docker-metadata:
name: Generate docker metadata
runs-on: ubuntu-latest
if: ${{ (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'require-build')) || (github.event_name != 'pull_request' && !cancelled()) }}
outputs:
tags: ${{steps.meta.outputs.tags}}
labels: ${{steps.meta.outputs.labels}}
annotations: ${{steps.meta.outputs.annotations}}
platforms: |
linux/amd64
${{(startsWith(github.ref, 'refs/tags/') && 'linux/arm64') || ''}}
should-build-frontend: ${{steps.decide.outputs.should-build-frontend}}
should-build-backend: ${{steps.decide.outputs.should-build-backend}}
e2e-frontend: ${{steps.decide.outputs.e2e-frontend}}
e2e-backend: ${{steps.decide.outputs.e2e-backend}}
permissions:
contents: read
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: lts/*
cache: npm
cache-dependency-path: .github/actions/*/package-lock.json
- run: npm ci
working-directory: .github/actions/decide
- name: Decide
id: decide
uses: ./.github/actions/decide/
with:
pr-e2e-frontend-label-prefix: 'e2e-frontend:'
pr-e2e-backend-label-prefix: 'e2e-backend:'
- name: Docker metadata
id: meta
uses: docker/metadata-action@v5
with:
github-token: ${{ github.token }}
images: "{0}/{1}"
tags: |
type=raw,value=branch-{{branch}},enable=${{ !startsWith(github.ref, 'refs/tags/') && !startsWith(github.ref, 'refs/pull/') }}
type=sha,prefix=sha-,format=long,suffix=-dev,enable=${{ !startsWith(github.ref, 'refs/tags/') }}
type=sha,prefix=sha-,format=long,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=ref,event=pr
type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{version}}
type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{major}}.{{minor}}
type=pep440,value=${{inputs.tag || github.ref_name}},pattern={{major}}
build-backend:
name: Build and Push Backend Image
runs-on: ubuntu-latest
needs: docker-metadata
if: ${{needs.docker-metadata.outputs.should-build-backend == 'true'}}
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.RELEASE_DOCKERHUB_USERNAME }}
password: ${{ secrets.RELEASE_DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push backend
uses: docker/build-push-action@v6
with:
context: backend
platforms: ${{needs.docker-metadata.outputs.platforms}}
push: true
tags: ${{format(needs.docker-metadata.outputs.tags, secrets.RELEASE_DOCKERHUB_USERNAME, 'backend')}}
labels: ${{needs.docker-metadata.outputs.labels}}
annotations: ${{needs.docker-metadata.outputs.annotations}}
cache-from: type=gha
cache-to: type=gha,mode=max
build-frontend:
name: Build and Push Frontend Image
runs-on: ubuntu-latest
needs: docker-metadata
if: ${{needs.docker-metadata.outputs.should-build-frontend == 'true'}}
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.RELEASE_DOCKERHUB_USERNAME }}
password: ${{ secrets.RELEASE_DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push frontend
uses: docker/build-push-action@v6
with:
context: .
file: ./frontend/Dockerfile
platforms: ${{needs.docker-metadata.outputs.platforms}}
push: true
tags: ${{format(needs.docker-metadata.outputs.tags, secrets.RELEASE_DOCKERHUB_USERNAME, 'frontend')}}
labels: ${{needs.docker-metadata.outputs.labels}}
annotations: ${{needs.docker-metadata.outputs.annotations}}
cache-from: type=gha
cache-to: type=gha,mode=max
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
timeout-minutes: 10
defaults:
run:
working-directory: e2e
needs:
- docker-metadata
- build-backend
- build-frontend
if: |
!cancelled() &&
needs.docker-metadata.outputs.e2e-backend &&
needs.docker-metadata.outputs.e2e-frontend &&
!contains(needs.*.result, 'failure') &&
!contains(needs.*.result, 'cancelled')
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: lts/*
cache: npm
cache-dependency-path: e2e/package-lock.json
- name: Install dependencies
run: npm ci
- name: Install Playwright Browsers
run: npx playwright install --with-deps chromium
- name: Prepare Tests
run: ./prepare-test.sh
env:
E2E_DOCKER_TAG_FRONTEND: ${{needs.docker-metadata.outputs.e2e-frontend}}
E2E_DOCKER_TAG_BACKEND: ${{needs.docker-metadata.outputs.e2e-backend}}
- name: Run tests
run: ./start-test.sh
env:
E2E_DOCKER_TAG_FRONTEND: ${{needs.docker-metadata.outputs.e2e-frontend}}
E2E_DOCKER_TAG_BACKEND: ${{needs.docker-metadata.outputs.e2e-backend}}
E2E_LLM_PROVIDER: ${{vars.E2E_LLM_PROVIDER}}
E2E_LLM_MODEL: ${{vars.E2E_LLM_MODEL || ''}}
E2E_LLM_CREDENTIALS: ${{secrets.E2E_LLM_CREDENTIALS}}
E2E_EMBEDDING_PROVIDER: ${{vars.E2E_EMBEDDING_PROVIDER || 'openai'}}
E2E_EMBEDDING_MODEL: ${{vars.E2E_EMBEDDING_MODEL || ''}}
E2E_EMBEDDING_CREDENTIALS: ${{secrets.E2E_EMBEDDING_CREDENTIALS}}
E2E_RERANKER_PROVIDER: ${{vars.E2E_RERANKER_PROVIDER || 'jinaai'}}
E2E_RERANKER_MODEL: ${{vars.E2E_RERANKER_MODEL || ''}}
E2E_RERANKER_CREDENTIALS: ${{secrets.E2E_RERANKER_CREDENTIALS}}
- uses: actions/upload-artifact@v4
if: "!cancelled()"
with:
name: e2e-report
path: e2e/playwright-report/
retention-days: 30
e2e-test-deploy:
name: "Deploy E2E Test Results"
runs-on: ubuntu-latest
needs: e2e-test
if: |
always()
&& !contains(needs.e2e-test.result, 'skipped')
&& !contains(needs.e2e-test.result, 'cancelled')
defaults:
run:
working-directory: e2e
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/download-artifact@v4
with:
name: e2e-report
path: e2e/playwright-report/
- name: Deploy Test Results to Vercel
id: deploy-test-result
run: |
deploy_message=$(./deploy-test-result.sh)
echo "deploy-message=${deploy_message}" >> $GITHUB_OUTPUT
if: "!cancelled()"
env:
VERCEL_TOKEN: ${{secrets.E2E_VERCEL_TOKEN}}
VERCEL_ORG_ID: ${{secrets.E2E_VERCEL_ORG_ID}}
VERCEL_PROJECT_ID: ${{secrets.E2E_VERCEL_PROJECT_ID}}
VERCEL_CLI_ARGS: ${{(github.ref == 'refs/heads/main' && '--prod') || ''}}
- name: Find PR Comment
uses: peter-evans/find-comment@v3
if: github.event.pull_request.number
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: E2E Result Deployment
- name: Upsert Comment on PR
uses: peter-evans/create-or-update-comment@v4
if: github.event.pull_request.number
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
body: |
### E2E Result Deployment
${{steps.deploy-test-result.outputs.deploy-message}}
edit-mode: replace
================================================
FILE: .github/workflows/verify.yml
================================================
name: Verify
on:
push:
branches:
- main
paths:
- frontend/**
- README.md
pull_request:
branches:
- main
paths:
- frontend/**
- README.md
jobs:
verify-frontend:
name: Verify frontend
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./frontend
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
with:
run_install: false
package_json_file: ./frontend/package.json
- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version-file: ./frontend/.nvmrc
cache-dependency-path: ./frontend
cache: 'pnpm'
- run: pnpm i --frozen-lockfile
- uses: actions/cache@v4
with:
path: |
./frontend/.next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('frontend/**/pnpm-lock.yaml') }}-${{ hashFiles('frontend/**/*.js', 'frontend/**/*.jsx', 'frontend/**/*.ts', 'frontend/**/*.tsx') }}
- run: pnpm run verify
================================================
FILE: .gitignore
================================================
*_dev.ipynb
.idea
.vscode
.env
.ruff_cache
redis-data
data
venv
.venv
local-embedding-reranker
*.swp
*.swo
.next
node_modules/
_pagefind/
================================================
FILE: CONTRIBUTING.md
================================================
# How to contribute
## Contributing Guidelines
[pingcap/autoflow](https://github.com/pingcap/autoflow) is an open-source project and we welcome contributions from the community. If you are interested in contributing to the project, please read the following guidelines.
### Before You Get Started
#### Software Prerequisites for Development
In this section, you should have some prerequisites software installed on your local machine:
* [Docker](https://docs.docker.com/get-docker/)
* [Docker Compose](https://docs.docker.com/compose/install/)
* [Python](https://www.python.org/downloads/)
* [Node.js](https://nodejs.org/en/download/)
* [TiDB Cloud Serverless](https://pingcap.com/ai/?utm_source=tidb.ai&utm_medium=community) or [TiDB Self-Managed](https://www.pingcap.com/tidb-self-managed/?utm_source=tidb.ai&utm_medium=community)
#### Setting up your development environment
Setting up the project on your local machine is the first step to contributing to the project. You can clone the project from the GitHub repository and then start the project on your local machine. You can follow the instructions in the [Deployment Guide](https://autoflow.tidb.ai/deploy-with-docker) file to set up the project on your local machine.
To test your local changes, you can build and run the project using:
```bash
docker compose -f docker-compose.dev.yml up
```
### Your First Contribution
All set to participate in the project? You can start by looking at the [open issues](https://github.com/pingcap/autoflow/issues) in this repo.
### Components of the Project
The project is divided into several components, and you can contribute to any of the following components:
* [Frontend](https://github.com/pingcap/autoflow/tree/main/frontend): The frontend of the project is built using Next.js.
* [Backend](https://github.com/pingcap/tidb.ai/tree/main/backend): The backend of the project is built using FastAPI.
* [Data Source](https://github.com/pingcap/autoflow/tree/main/backend/app/rag/datasource): The Data Source component is responsible for indexing the data from different type of sources. You can add more data source types to the project.
* [LLM](https://github.com/pingcap/tidb.ai/tree/main/backend/app/rag/llms): The LLM Engine component is responsible for extracting knowledge from docs and generating responses. You can add more LLM models support to the project.
* [Reranker](https://github.com/pingcap/tidb.ai/blob/main/backend/app/rag/reranker_model_option.py): The Reranker Engine component is responsible for reranking the results retrieved from the database. You can add more Reranker models support to the project.
* [Embedding](https://github.com/pingcap/tidb.ai/blob/main/backend/app/rag/embed_model_option.py): The Embedding Engine component is responsible for converting text into vectors. You can add more Embedding models support to the project.
* [RAG & GraphRAG Engine](https://github.com/pingcap/tidb.ai/tree/main/backend/app/rag): The component is responsible for extracting knowledge from docs and then chunking, indexing and storing the data in the database; it also includes retrieving the data from the database and generating the answer for the user.
* [Documentations](https://github.com/pingcap/tidb.ai/tree/main/frontend/app/src/pages): The documentation of the project is written in Markdown files. You can contribute to the documentation by adding more content to the documentation.
### How to add an API?
Using the FastAPI framework.
* Create a FastAPI Instance.
```python
router = FastAPI()
```
* Use Decorators to Define API Endpoints. For example:
```python
@router.get("xxx")
```
* Implement the Route Handler Function.
* Add sub-routes to the main route and tag the sub-routes in `backend/app/api/main.py` .
```python
api_router.include_router(sub_router, tags=["xxxx"])
```
## Maintainers
Please feel free to reach out to the maintainers if you have any questions or need help with the project.
* [wd0517](https://github.com/wd0517)
* [634750802](https://github.com/634750802)
* [Mini256](https://github.com/Mini256)
* [IANTHEREAL](https://github.com/IANTHEREAL)
* [Cheese](https://github.com/Icemap)
## Discussion
If you have any questions or suggestions, please feel free to open a discussion in the [Discussions](https://github.com/pingcap/tidb.ai/discussions)
================================================
FILE: LICENSE.txt
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2025 PingCAP
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: README.md
================================================
<!-- markdownlint-disable MD033 MD041 -->
<div align="center">
<h1>AutoFlow</h1>
<a href='https://www.pingcap.com/tidb-cloud-serverless/?utm_source=tidb.ai&utm_medium=community'>
<img src="https://raw.githubusercontent.com/pingcap/autoflow/refs/heads/main/docs/public/icon-dark.svg" alt="AutoFlow" width =100 height=100></img>
</a>
<a href="https://trendshift.io/repositories/12294" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12294" alt="pingcap%2Fautoflow | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
[](https://hub.docker.com/r/tidbai/backend)
[](https://hub.docker.com/r/tidbai/frontend)
[](https://tidb-ai-playwright.vercel.app/)
</div>
> [!WARNING]
> Autoflow is still in the early stages of development. We are actively working on it; the next step is to turn it into a Python package and make it a standalone RAG solution, e.g. `pip install autoflow-ai`. If you have any questions or suggestions, please feel free to contact us on [Discussion](https://github.com/pingcap/autoflow/discussions).
## Introduction
AutoFlow is an open source graph rag (graphrag: knowledge graph rag) based knowledge base tool built on top of [TiDB Vector](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) and [LlamaIndex](https://github.com/run-llama/llama_index) and [DSPy](https://github.com/stanfordnlp/dspy).
- **Live Demo**: [https://tidb.ai](https://tidb.ai?utm_source=tidb.ai&utm_medium=community)
- **Deployment Docs**: [Deployment Docs](https://autoflow.tidb.ai/?utm_source=github&utm_medium=tidb.ai)
## Features
1. **Perplexity-style Conversational Search page**: Our platform features an advanced built-in website crawler, designed to elevate your browsing experience. This crawler effortlessly navigates official and documentation sites, ensuring comprehensive coverage and streamlined search processes through sitemap URL scraping.

2. **Embeddable JavaScript Snippet**: Integrate our conversational search window effortlessly into your website by copying and embedding a simple JavaScript code snippet. This widget, typically placed at the bottom right corner of your site, facilitates instant responses to product-related queries.

## Deploy
- [Deploy with Docker Compose](https://autoflow.tidb.ai/deploy-with-docker) (with: 4 CPU cores and 8GB RAM)
## Tech Stack
- [TiDB](https://www.pingcap.com/ai?utm_source=tidb.ai&utm_medium=community) – Database to store chat history, vector, json, and analytic
- [LlamaIndex](https://www.llamaindex.ai/) - RAG framework
- [DSPy](https://github.com/stanfordnlp/dspy) - The framework for programming—not prompting—foundation models
- [Next.js](https://nextjs.org/) – Framework
- [Tailwind CSS](https://tailwindcss.com/) – CSS framework
- [shadcn/ui](https://ui.shadcn.com/) - Design
## Contributing
We welcome contributions from the community. If you are interested in contributing to the project, please read the [Contributing Guidelines](/CONTRIBUTING.md).
<a href="https://next.ossinsight.io/widgets/official/compose-last-28-days-stats?repo_id=752946440" target="_blank" style="display: block" align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=dark" width="655" height="auto">
<img alt="Performance Stats of pingcap/autoflow - Last 28 days" src="https://next.ossinsight.io/widgets/official/compose-last-28-days-stats/thumbnail.png?repo_id=752946440&image_size=auto&color_scheme=light" width="655" height="auto">
</picture>
</a>
<!-- Made with [OSS Insight](https://ossinsight.io/) -->
## License
AutoFlow is open-source under the Apache License, Version 2.0. You can [find it here](https://github.com/pingcap/autoflow/blob/main/LICENSE.txt).
## Contact
You can reach out to us on [Discord](https://discord.gg/XzSW23Jg9p).
================================================
FILE: backend/.dockerignore
================================================
# MacOS
.DS_Store
# Environment
.env
.venv
# Git
.git
# IDE
.idea
.vscode
# Cache
.pytest_cache
.ruff_cache
**/__pycache__
# Test
.tox
tests
pytest.ini
# Extra
local_embedding_reranker/
.pre-commit-config.yaml
.gitignore
Dockerfile
.dockerignore
================================================
FILE: backend/.gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.ruff_cache
*.csv
.DS_Store
# VSCode
.vscode/
checkpoint.json
================================================
FILE: backend/.pre-commit-config.yaml
================================================
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.8.6
hooks:
# Run the linter.
- id: ruff
# Run the formatter.
- id: ruff-format
================================================
FILE: backend/.python-version
================================================
3.12.3
================================================
FILE: backend/Dockerfile
================================================
# NOTE(review): base image pins Python 3.11 while backend/.python-version pins
# 3.12.3 — confirm which is intended and align the two.
FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim

WORKDIR /app/

# Setup supervisord.
RUN apt-get update && apt-get install -y supervisor gcc
COPY supervisord.conf /usr/etc/supervisord.conf

# Install dependencies.
# Lockfile and manifest are copied before the source tree so this layer is
# cached unless the dependencies themselves change.
COPY uv.lock /app/uv.lock
COPY pyproject.toml /app/pyproject.toml
RUN PYTHONDONTWRITEBYTECODE=1 uv sync --frozen
ENV PATH="/app/.venv/bin:$PATH"

# Pre-download playwright dependencies.
RUN playwright install --with-deps chromium

# Pre-download nltk data.
# The download dir is llama_index's bundled NLTK cache path (hard-coded for
# the python3.11 site-packages layout of the base image).
RUN python -c 'import nltk; \
download_dir = "/usr/local/lib/python3.11/site-packages/llama_index/core/_static/nltk_cache";\
nltk.download("stopwords", download_dir=download_dir);\
nltk.download("punkt", download_dir=download_dir);'

ENV PYTHONPATH=/app

COPY . /app/

# Default number of workers
ENV WEB_CONCURRENCY=4

CMD ["sh", "-c", "fastapi run app/api_server.py --host 0.0.0.0 --port 80 --workers ${WEB_CONCURRENCY}"]
================================================
FILE: backend/Makefile
================================================
# Every target below is a command, not a file — declare them all phony so make
# always runs them even if a file/directory with the same name exists.
# (Previously only `test` was declared, so e.g. a `lint` file would shadow the
# target.)
.PHONY: makemigrations migrate lint format test dev_backend run_backend \
	dev_celery_flower dev_background_worker dev_eval_worker

# Create a new alembic revision; pass NAME=<message> to label it.
makemigrations:
	@echo "Creating migrations..."
	@if [ -z "$(NAME)" ]; then \
		uv run alembic revision --autogenerate; \
	else \
		uv run alembic revision --autogenerate -m "$(NAME)"; \
	fi

# Apply all pending migrations up to head.
migrate:
	@echo "Migrating database..."
	@uv run alembic upgrade head

lint:
	@echo "Linting code..."
	@uv run ruff check .

format:
	@echo "Formatting code..."
	@uv run ruff format .

test:
	@echo "Running tests..."
	@uv run pytest -v tests/

# Development server with auto-reload (local only).
dev_backend:
	@echo "Running backend server in development mode..."
	@uv run fastapi dev app/api_server.py --host 127.0.0.1 --port 5001

run_backend:
	@echo "Running backend server..."
	@uv run fastapi run app/api_server.py --host 0.0.0.0 --port 5001 --workers 4

dev_celery_flower:
	@echo "Running Celery Flower..."
	@uv run celery -A app.celery flower --address=0.0.0.0 --port=5555

dev_background_worker:
	@echo "Running background worker..."
	@uv run celery -A app.celery worker -Q default -l INFO -E

dev_eval_worker:
	@echo "Running evaluation worker..."
	@uv run celery -A app.celery worker -Q evaluation --loglevel=debug --pool=solo
================================================
FILE: backend/README.md
================================================
# Backend of tidb.ai
## Development
### Install dependencies
1. Install [uv](https://docs.astral.sh/uv/getting-started/installation/)
2. Use `uv` to install dependencies
```bash
uv sync
```
### Prepare environment
```bash
cp .env.example .env
```
Edit `.env` to set environment variables.
### Run migrations
```bash
make migrate
```
### Run development server
```bash
uv run python main.py runserver
```
================================================
FILE: backend/alembic.ini
================================================
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = app/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
================================================
FILE: backend/app/__init__.py
================================================
import os

# NOTE(review): presumably tells LiteLLM to use its bundled local model-cost
# map instead of fetching one remotely at import time — confirm against the
# LiteLLM docs. Must be set before litellm is imported, hence package __init__.
os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True"
================================================
FILE: backend/app/alembic/env.py
================================================
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel
from tidb_vector.sqlalchemy import VectorType
from app.core.config import settings
from app.models import * # noqa
from app.models.knowledge_base_scoped.table_naming import (
KB_CHUNKS_TABLE_PATTERN,
KB_ENTITIES_TABLE_PATTERN,
KB_RELATIONSHIPS_TABLE_PATTERN,
)
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None
# Autogenerate diffs against the SQLModel metadata, which collects every
# model imported via `from app.models import *` above.
target_metadata = SQLModel.metadata
def get_url():
    """Return the SQLAlchemy database URL taken from application settings."""
    database_uri = settings.SQLALCHEMY_DATABASE_URI
    return str(database_uri)
def include_name(name, type_, parent_names):
    """Alembic autogenerate filter.

    Excludes the dynamically created, knowledge-base-scoped tables
    (chunks/entities/relationships) from schema comparison; every other
    object is included.
    """
    if type_ != "table":
        return True
    kb_table_patterns = (
        KB_CHUNKS_TABLE_PATTERN,
        KB_ENTITIES_TABLE_PATTERN,
        KB_RELATIONSHIPS_TABLE_PATTERN,
    )
    return not any(pattern.match(name) for pattern in kb_table_patterns)
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        # Skip KB-scoped dynamic tables during autogenerate.
        include_name=include_name,
        literal_binds=True,
        # Detect column type changes, not only added/removed columns.
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    configuration = config.get_section(config.config_ini_section)
    configuration["sqlalchemy.url"] = get_url()
    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        # Migrations are one-shot; no need to keep pooled connections.
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        # Register TiDB's VECTOR column type with the dialect so reflection
        # of existing tables does not choke on vector columns.
        connection.dialect.ischema_names["vector"] = VectorType
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            include_name=include_name,
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic invoked with --sql runs offline (emits SQL text);
# otherwise connect to the database and apply migrations directly.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
================================================
FILE: backend/app/alembic/script.py.mako
================================================
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from tidb_vector.sqlalchemy import VectorType
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
================================================
FILE: backend/app/alembic/versions/00534dc350db_.py
================================================
"""empty message
Revision ID: 00534dc350db
Revises: 10f36e8a25c4
Create Date: 2024-08-26 12:46:00.203425
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "00534dc350db"
down_revision = "10f36e8a25c4"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Loosen the rigid ENUM `provider` columns into free-form strings, then
    # normalize the stored values to lower case to match the new convention.
    enum_columns = [
        ("embedding_models", mysql.ENUM("OPENAI")),
        (
            "llms",
            mysql.ENUM(
                "OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE", "BEDROCK"
            ),
        ),
        ("reranker_models", mysql.ENUM("JINA", "COHERE", "BAISHENG")),
    ]
    for table_name, old_enum in enum_columns:
        op.alter_column(
            table_name,
            "provider",
            existing_type=old_enum,
            type_=sa.String(length=32),
            existing_nullable=False,
        )
    op.execute("UPDATE embedding_models SET provider = lower(provider)")
    op.execute("UPDATE llms SET provider = lower(provider)")
    op.execute("UPDATE reranker_models SET provider = lower(provider)")
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the ENUM provider columns. NOTE(review): values were lowercased
    # by upgrade() and are not converted back here, so rows may not match the
    # upper-case ENUM members — verify before relying on this downgrade.
    op.alter_column(
        "reranker_models",
        "provider",
        existing_type=sa.String(length=32),
        type_=mysql.ENUM("JINA", "COHERE", "BAISHENG"),
        existing_nullable=False,
    )
    op.alter_column(
        "llms",
        "provider",
        existing_type=sa.String(length=32),
        type_=mysql.ENUM(
            "OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE", "BEDROCK"
        ),
        existing_nullable=False,
    )
    op.alter_column(
        "embedding_models",
        "provider",
        existing_type=sa.String(length=32),
        type_=mysql.ENUM("OPENAI"),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/041fbef26e3a_.py
================================================
"""empty message
Revision ID: 041fbef26e3a
Revises: 04d81be446c3
Create Date: 2024-08-19 08:20:13.695891
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.mysql import DATETIME
# revision identifiers, used by Alembic.
revision = "041fbef26e3a"
down_revision = "04d81be446c3"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Make site_settings timestamps database-managed: created_at defaults to
    # insert time, updated_at auto-refreshes on every UPDATE. fsp=6 keeps
    # microsecond precision.
    op.alter_column(
        "site_settings",
        "created_at",
        existing_type=DATETIME(timezone=True, fsp=6),
        server_default=sa.text("current_timestamp(6)"),
        nullable=False,
    )
    op.alter_column(
        "site_settings",
        "updated_at",
        existing_type=DATETIME(timezone=True, fsp=6),
        server_default=sa.text("current_timestamp(6) on update current_timestamp(6)"),
        nullable=False,
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the server defaults (columns stay NOT NULL, no fsp specified).
    op.alter_column(
        "site_settings",
        "created_at",
        existing_type=DATETIME(timezone=True),
        nullable=False,
    )
    op.alter_column(
        "site_settings",
        "updated_at",
        existing_type=DATETIME(timezone=True),
        nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/04947f9684ab_public_chat_engine.py
================================================
"""public_chat_engine
Revision ID: 04947f9684ab
Revises: 211f3c5aa125
Create Date: 2025-05-28 15:13:22.058160
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "04947f9684ab"
down_revision = "211f3c5aa125"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the non-nullable `is_public` flag to chat engines.
    is_public_column = sa.Column("is_public", sa.Boolean(), nullable=False)
    op.add_column("chat_engines", is_public_column)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the flag added by upgrade().
    op.drop_column("chat_engines", "is_public")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/04d4f05116ed_.py
================================================
"""empty message
Revision ID: 04d4f05116ed
Revises: 94b198e20946
Create Date: 2024-07-23 01:26:07.117623
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from app.models.base import AESEncryptedColumn
# revision identifiers, used by Alembic.
revision = "04d4f05116ed"
down_revision = "94b198e20946"
branch_labels = None
depends_on = None
def upgrade():
    """Introduce configurable model records and wire chat engines to LLMs."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Embedding model registry; credentials are stored AES-encrypted.
    op.create_table(
        "embedding_models",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),
        sa.Column(
            "provider", sa.Enum("OPENAI", name="embeddingprovider"), nullable=False
        ),
        sa.Column(
            "model", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False
        ),
        sa.Column("config", sa.JSON(), nullable=True),
        sa.Column("credentials", AESEncryptedColumn(), nullable=True),
        sa.Column("is_default", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # LLM registry, same shape as embedding_models but with more providers.
    op.create_table(
        "llms",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),
        sa.Column(
            "provider",
            sa.Enum("OPENAI", "GEMINI", "ANTHROPIC_VERTEX", name="llmprovider"),
            nullable=False,
        ),
        sa.Column(
            "model", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False
        ),
        sa.Column("config", sa.JSON(), nullable=True),
        sa.Column("credentials", AESEncryptedColumn(), nullable=True),
        sa.Column("is_default", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # Chat engines may reference a main LLM and a (cheaper) fast LLM.
    # The foreign keys are created without explicit names (None).
    op.add_column("chat_engines", sa.Column("llm_id", sa.Integer(), nullable=True))
    op.add_column("chat_engines", sa.Column("fast_llm_id", sa.Integer(), nullable=True))
    op.create_foreign_key(None, "chat_engines", "llms", ["fast_llm_id"], ["id"])
    op.create_foreign_key(None, "chat_engines", "llms", ["llm_id"], ["id"])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_constraint(None, ...) requires a constraint name;
    # the upgrade created these FKs unnamed, so the server-generated names
    # are unknown here — confirm this downgrade actually runs on MySQL/TiDB.
    op.drop_constraint(None, "chat_engines", type_="foreignkey")
    op.drop_constraint(None, "chat_engines", type_="foreignkey")
    op.drop_column("chat_engines", "fast_llm_id")
    op.drop_column("chat_engines", "llm_id")
    op.drop_table("llms")
    op.drop_table("embedding_models")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/04d81be446c3_.py
================================================
"""empty message
Revision ID: 04d81be446c3
Revises: e32f1e546eec
Create Date: 2024-08-08 17:11:50.178696
"""
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "04d81be446c3"
down_revision = "e32f1e546eec"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-declare llms.provider as NOT NULL with the BEDROCK member included.
    provider_enum = mysql.ENUM(
        "OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE", "BEDROCK"
    )
    op.alter_column("llms", "provider", existing_type=provider_enum, nullable=False)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Revert to the enum without the BEDROCK member.
    op.alter_column(
        "llms",
        "provider",
        existing_type=mysql.ENUM("OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE"),
        nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/10f36e8a25c4_.py
================================================
"""empty message
Revision ID: 10f36e8a25c4
Revises: 041fbef26e3a
Create Date: 2024-08-20 09:10:50.130219
"""
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "10f36e8a25c4"
down_revision = "041fbef26e3a"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-declare reranker provider enum with BAISHENG added, NOT NULL.
    reranker_providers = mysql.ENUM("JINA", "COHERE", "BAISHENG")
    op.alter_column(
        "reranker_models",
        "provider",
        existing_type=reranker_providers,
        nullable=False,
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Revert to the enum without the BAISHENG member.
    op.alter_column(
        "reranker_models",
        "provider",
        existing_type=mysql.ENUM("JINA", "COHERE"),
        nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/197bc8be72d1_.py
================================================
"""empty message
Revision ID: 197bc8be72d1
Revises: 04d4f05116ed
Create Date: 2024-07-25 14:49:29.363595
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "197bc8be72d1"
down_revision = "04d4f05116ed"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the OPENAI_LIKE member to the llmprovider enum.
    # NOTE(review): unlike sibling migrations, no existing_type is passed —
    # confirm alembic's MySQL dialect handles this alter without it.
    op.alter_column(
        "llms",
        "provider",
        type_=sa.Enum(
            "OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE", name="llmprovider"
        ),
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Revert the enum to its pre-OPENAI_LIKE member set.
    op.alter_column(
        "llms",
        "provider",
        type_=sa.Enum("OPENAI", "GEMINI", "ANTHROPIC_VERTEX", name="llmprovider"),
    )
================================================
FILE: backend/app/alembic/versions/211f3c5aa125_chunking_settings.py
================================================
"""chunking_settings
Revision ID: 211f3c5aa125
Revises: 2adc0b597dcd
Create Date: 2025-02-17 14:20:56.253857
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "211f3c5aa125"
down_revision = "2adc0b597dcd"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Per-knowledge-base chunking configuration, stored as a JSON document.
    chunking_config_column = sa.Column("chunking_config", sa.JSON(), nullable=True)
    op.add_column("knowledge_bases", chunking_config_column)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the column added by upgrade().
    op.drop_column("knowledge_bases", "chunking_config")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/27a6723b767a_.py
================================================
"""empty message
Revision ID: 27a6723b767a
Revises: d2ad44deab20
Create Date: 2024-11-29 20:38:05.773083
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "27a6723b767a"
down_revision = "d2ad44deab20"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Flag a single message per chat as the "best answer" (default: false),
    # with an index so flagged messages can be looked up cheaply.
    best_answer_column = sa.Column(
        "is_best_answer", sa.Boolean(), server_default="0", nullable=False
    )
    op.add_column("chat_messages", best_answer_column)
    op.create_index(
        "ix_chat_message_is_best_answer",
        "chat_messages",
        ["is_best_answer"],
        unique=False,
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the index before the column it covers.
    op.drop_index("ix_chat_message_is_best_answer", table_name="chat_messages")
    op.drop_column("chat_messages", "is_best_answer")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/2adc0b597dcd_int_enum_type.py
================================================
"""int_enum_type
Revision ID: 2adc0b597dcd
Revises: a54f966436ce
Create Date: 2025-01-24 17:58:08.339090
"""
from alembic import op
from sqlalchemy.dialects import mysql
from app.models.base import IntEnumType
from app.models.chat import ChatVisibility
# revision identifiers, used by Alembic.
revision = "2adc0b597dcd"
down_revision = "a54f966436ce"
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Migrate chats.visibility from a raw SMALLINT to the project's
    # IntEnumType wrapper around the ChatVisibility enum.
    op.alter_column(
        "chats",
        "visibility",
        existing_type=mysql.SMALLINT(),
        type_=IntEnumType(ChatVisibility),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Revert to a plain SMALLINT column.
    op.alter_column(
        "chats",
        "visibility",
        existing_type=IntEnumType(ChatVisibility),
        type_=mysql.SMALLINT(),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/2fc10c21bf88_.py
================================================
"""empty message
Revision ID: 2fc10c21bf88
Revises:
Create Date: 2024-07-10 14:43:55.913126
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from tidb_vector.sqlalchemy import VectorType
from sqlalchemy.dialects import mysql
from app.core.config import settings
# revision identifiers, used by Alembic.
revision = "2fc10c21bf88"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial application schema.

    Auto-generated by Alembic; table bodies kept as generated. Vector
    columns use TiDB's VectorType sized by settings.EMBEDDING_DIMS, with an
    "hnsw(distance=cosine)" column comment as the index hint.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Chat engine configurations (soft-deleted via deleted_at).
    op.create_table(
        "chat_engines",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),
        sa.Column("engine_options", sa.JSON(), nullable=True),
        sa.Column("is_default", sa.Boolean(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Ingested source documents, unique per source_uri.
    op.create_table(
        "documents",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("hash", sqlmodel.sql.sqltypes.AutoString(length=32), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),
        sa.Column("content", mysql.MEDIUMTEXT(), nullable=True),
        sa.Column(
            "mime_type", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False
        ),
        sa.Column(
            "source_uri", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False
        ),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column("last_modified_at", sa.DateTime(), nullable=True),
        sa.Column(
            "index_status",
            sa.Enum(
                "NOT_STARTED",
                "PENDING",
                "RUNNING",
                "COMPLETED",
                "FAILED",
                name="docindextaskstatus",
            ),
            nullable=False,
        ),
        sa.Column("index_result", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("source_uri"),
    )
    # Knowledge-graph entities with vector embeddings for description/meta.
    op.create_table(
        "entities",
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column(
            "entity_type",
            sa.Enum("original", "synopsis", name="entitytype"),
            nullable=False,
        ),
        sa.Column("synopsis_info", sa.JSON(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "description_vec",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.Column(
            "meta_vec",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Query/answer cache; rows expire via TiDB's TTL table option.
    op.create_table(
        "semantic_cache",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("query", sa.Text(), nullable=True),
        sa.Column(
            "query_vec",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.Column("value", sa.Text(), nullable=True),
        sa.Column(
            "value_vec",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(), server_default=sa.text("now()"), nullable=True
        ),
        sa.Column(
            "updated_at", sa.DateTime(), server_default=sa.text("now()"), nullable=True
        ),
        sa.PrimaryKeyConstraint("id"),
        mysql_TTL="created_at + INTERVAL 1 MONTH;",
    )
    # Key/value site configuration, typed via data_type, unique per name.
    op.create_table(
        "site_settings",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),
        sa.Column(
            "data_type", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False
        ),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    # Audit log of staff actions with before/after snapshots.
    op.create_table(
        "staff_action_logs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("action", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "action_time", sa.DateTime(), server_default=sa.text("now()"), nullable=True
        ),
        sa.Column("target_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("target_id", sa.Integer(), nullable=False),
        sa.Column("before", sa.JSON(), nullable=True),
        sa.Column("after", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # User accounts (GUID primary key, unique email).
    op.create_table(
        "users",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("email", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "hashed_password", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("is_superuser", sa.Boolean(), nullable=False),
        sa.Column("is_verified", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
    op.create_index(op.f("ix_users_id"), "users", ["id"], unique=False)
    # Per-user API keys; only the hash of the secret is stored.
    op.create_table(
        "api_keys",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "description", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=False
        ),
        sa.Column(
            "hashed_secret",
            sqlmodel.sql.sqltypes.AutoString(length=255),
            nullable=False,
        ),
        sa.Column(
            "api_key_display",
            sqlmodel.sql.sqltypes.AutoString(length=100),
            nullable=False,
        ),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("hashed_secret"),
    )
    # Chat sessions, optionally tied to a user and an engine.
    op.create_table(
        "chats",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column(
            "title", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False
        ),
        sa.Column("engine_id", sa.Integer(), nullable=True),
        sa.Column("engine_options", sa.JSON(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["engine_id"],
            ["chat_engines.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_chats_id"), "chats", ["id"], unique=False)
    # Document chunks with text embedding and KG-index status.
    op.create_table(
        "chunks",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("hash", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),
        sa.Column("text", sa.Text(), nullable=True),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column(
            "embedding",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.Column("document_id", sa.Integer(), nullable=True),
        sa.Column("relations", sa.JSON(), nullable=True),
        sa.Column(
            "source_uri", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=True
        ),
        sa.Column(
            "index_status",
            sa.Enum(
                "NOT_STARTED",
                "PENDING",
                "RUNNING",
                "COMPLETED",
                "FAILED",
                name="kgindexstatus",
            ),
            nullable=False,
        ),
        sa.Column("index_result", sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(
            ["document_id"],
            ["documents.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_chunks_id"), "chunks", ["id"], unique=False)
    # Knowledge-graph edges between entities, with weighted relevance.
    op.create_table(
        "relationships",
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("meta", sa.JSON(), nullable=True),
        sa.Column("weight", sa.Integer(), nullable=False),
        sa.Column("source_entity_id", sa.Integer(), nullable=False),
        sa.Column("target_entity_id", sa.Integer(), nullable=False),
        sa.Column("last_modified_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "description_vec",
            VectorType(dim=settings.EMBEDDING_DIMS),
            nullable=True,
            comment="hnsw(distance=cosine)",
        ),
        sa.ForeignKeyConstraint(
            ["source_entity_id"],
            ["entities.id"],
        ),
        sa.ForeignKeyConstraint(
            ["target_entity_id"],
            ["entities.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Login sessions keyed by the session token itself.
    op.create_table(
        "user_sessions",
        sa.Column("token", sqlmodel.sql.sqltypes.AutoString(length=43), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("token"),
    )
    # Individual messages within a chat, ordered by `ordinal`.
    op.create_table(
        "chat_messages",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ordinal", sa.Integer(), nullable=False),
        sa.Column("role", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),
        sa.Column("content", sa.Text(), nullable=True),
        sa.Column("error", sa.Text(), nullable=True),
        sa.Column("sources", sa.JSON(), nullable=True),
        sa.Column(
            "trace_url", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=True
        ),
        sa.Column("finished_at", sa.DateTime(), nullable=True),
        sa.Column("chat_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["chat_id"],
            ["chats.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Like/dislike feedback attached to a specific chat message.
    op.create_table(
        "feedbacks",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "feedback_type",
            sa.Enum("LIKE", "DISLIKE", name="feedbacktype"),
            nullable=False,
        ),
        sa.Column(
            "comment", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=False
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("chat_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("chat_message_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["chat_id"],
            ["chats.id"],
        ),
        sa.ForeignKeyConstraint(
            ["chat_message_id"],
            ["chat_messages.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop everything created by this revision's upgrade().

    Tables are dropped child-first (tables holding foreign keys before the
    tables they reference) so no FK constraint blocks a drop; each table's
    explicit indexes are dropped before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("feedbacks")
    op.drop_table("chat_messages")
    op.drop_table("user_sessions")
    op.drop_table("relationships")
    op.drop_index(op.f("ix_chunks_id"), table_name="chunks")
    op.drop_table("chunks")
    op.drop_index(op.f("ix_chats_id"), table_name="chats")
    op.drop_table("chats")
    op.drop_table("api_keys")
    op.drop_index(op.f("ix_users_id"), table_name="users")
    op.drop_index(op.f("ix_users_email"), table_name="users")
    # users is dropped only after every table referencing users.id is gone
    op.drop_table("users")
    op.drop_table("staff_action_logs")
    op.drop_table("site_settings")
    op.drop_table("semantic_cache")
    op.drop_table("entities")
    op.drop_table("documents")
    op.drop_table("chat_engines")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/749767db5505_add_recommend_questions.py
================================================
"""add recommend questions
Revision ID: 749767db5505
Revises: 8093333c0d87
Create Date: 2024-10-15 16:02:14.203584
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "749767db5505"
down_revision = "8093333c0d87"
branch_labels = None
depends_on = None
def upgrade():
    """Create the recommend_questions table.

    Each row stores a JSON list of suggested follow-up questions tied to a
    single chat message via chat_message_id (FK to chat_messages.id), with a
    non-unique lookup index on that column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "recommend_questions",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("questions", sa.JSON(), nullable=True),
        sa.Column("chat_message_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["chat_message_id"],
            ["chat_messages.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_recommend_questions_chat_message_id"),
        "recommend_questions",
        ["chat_message_id"],
        unique=False,
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the recommend_questions table and its chat_message_id index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(
        op.f("ix_recommend_questions_chat_message_id"), table_name="recommend_questions"
    )
    op.drop_table("recommend_questions")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/8093333c0d87_.py
================================================
"""empty message
Revision ID: 8093333c0d87
Revises: 830fd9c44f39
Create Date: 2024-09-24 12:23:48.076576
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "8093333c0d87"
down_revision = "830fd9c44f39"
branch_labels = None
depends_on = None
def upgrade():
    """Add chats.visibility and normalize chats.engine_options to native JSON."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): NOT NULL with no server_default — MySQL fills existing
    # rows with the implicit default 0; confirm 0 is the intended visibility
    # for pre-existing user-owned chats.
    op.add_column("chats", sa.Column("visibility", sa.SmallInteger(), nullable=False))
    # Chats with no owner are set to visibility = 1 (presumably "public" —
    # verify against the ChatVisibility enum in the models).
    op.execute("UPDATE chats SET visibility = 1 WHERE user_id IS NULL;")
    # engine_options previously held a JSON-encoded string; unquote and cast
    # so the column stores an actual JSON document.
    op.execute(
        "UPDATE chats SET engine_options = CAST(JSON_UNQUOTE(engine_options) AS JSON);"
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop chats.visibility. The engine_options rewrite is NOT reverted."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("chats", "visibility")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/830fd9c44f39_.py
================================================
"""empty message
Revision ID: 830fd9c44f39
Revises: dfee070b8abd
Create Date: 2024-09-19 13:04:30.351449
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
# revision identifiers, used by Alembic.
revision = "830fd9c44f39"
down_revision = "dfee070b8abd"
branch_labels = None
depends_on = None
def upgrade():
    """Add optional origin tracking columns and a post-verification URL.

    - chats.origin / feedbacks.origin: nullable varchar(256)
    - chat_messages.post_verification_result_url: nullable varchar(512)
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "chats",
        sa.Column(
            "origin", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=True
        ),
    )
    op.add_column(
        "feedbacks",
        sa.Column(
            "origin", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=True
        ),
    )
    op.add_column(
        "chat_messages",
        sa.Column(
            "post_verification_result_url",
            sqlmodel.sql.sqltypes.AutoString(length=512),
            nullable=True,
        ),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the three columns added by upgrade(), in reverse order."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("chat_messages", "post_verification_result_url")
    op.drop_column("feedbacks", "origin")
    op.drop_column("chats", "origin")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/94b198e20946_.py
================================================
"""empty message
Revision ID: 94b198e20946
Revises: 2fc10c21bf88
Create Date: 2024-07-11 15:19:19.174568
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
# revision identifiers, used by Alembic.
revision = "94b198e20946"
down_revision = "2fc10c21bf88"
branch_labels = None
depends_on = None
def upgrade():
    """Introduce data_sources and uploads tables; link documents to a source.

    documents.data_source_id is added as a plain nullable integer — no FK
    constraint is created here (one is added later, in d2ad44deab20).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "data_sources",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False),
        sa.Column(
            "description", sqlmodel.sql.sqltypes.AutoString(length=512), nullable=False
        ),
        sa.Column(
            "data_source_type",
            sqlmodel.sql.sqltypes.AutoString(length=256),
            nullable=False,
        ),
        sa.Column("config", sa.JSON(), nullable=True),
        sa.Column("build_kg_index", sa.Boolean(), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "uploads",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
        sa.Column("size", sa.Integer(), nullable=False),
        sa.Column("path", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
        sa.Column(
            "mime_type", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False
        ),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.add_column("documents", sa.Column("data_source_id", sa.Integer(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove documents.data_source_id, then drop uploads and data_sources."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("documents", "data_source_id")
    op.drop_table("uploads")
    op.drop_table("data_sources")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/a54f966436ce_evaluation.py
================================================
"""evaluation
Revision ID: a54f966436ce
Revises: 27a6723b767a
Create Date: 2024-12-09 16:46:21.077517
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
# revision identifiers, used by Alembic.
revision = "a54f966436ce"
down_revision = "27a6723b767a"
branch_labels = None
depends_on = None
def upgrade():
    """Create the evaluation schema: datasets, tasks, and their item tables.

    Parents (evaluation_datasets, evaluation_tasks) are created before the
    child item tables that reference them.

    NOTE(review): evaluation_tasks.dataset_id is a plain nullable integer —
    no ForeignKeyConstraint to evaluation_datasets.id is declared here;
    confirm whether that is intentional.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "evaluation_datasets",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "evaluation_tasks",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("dataset_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "evaluation_dataset_items",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("query", sa.Text(), nullable=True),
        sa.Column("reference", sa.Text(), nullable=True),
        sa.Column("retrieved_contexts", sa.JSON(), nullable=True),
        sa.Column("extra", sa.JSON(), nullable=True),
        sa.Column("evaluation_dataset_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["evaluation_dataset_id"],
            ["evaluation_datasets.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "evaluation_task_items",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "chat_engine", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False
        ),
        sa.Column("status", sa.String(length=32), nullable=False),
        sa.Column("query", sa.Text(), nullable=True),
        sa.Column("reference", sa.Text(), nullable=True),
        sa.Column("response", sa.Text(), nullable=True),
        sa.Column("retrieved_contexts", sa.JSON(), nullable=True),
        sa.Column("extra", sa.JSON(), nullable=True),
        sa.Column("error_msg", sa.Text(), nullable=True),
        sa.Column("factual_correctness", sa.Float(), nullable=True),
        sa.Column("semantic_similarity", sa.Float(), nullable=True),
        sa.Column("evaluation_task_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["evaluation_task_id"],
            ["evaluation_tasks.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the evaluation tables, item (child) tables first."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("evaluation_task_items")
    op.drop_table("evaluation_dataset_items")
    op.drop_table("evaluation_tasks")
    op.drop_table("evaluation_datasets")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/a8c79553c9f6_.py
================================================
"""empty message
Revision ID: a8c79553c9f6
Revises: ac6e4d58580d
Create Date: 2024-08-05 13:04:17.572821
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a8c79553c9f6"
down_revision = "ac6e4d58580d"
branch_labels = None
depends_on = None
def upgrade():
    """Add a nullable llm_id column to data_sources, referencing llms.id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("data_sources", sa.Column("llm_id", sa.Integer(), nullable=True))
    # Name the constraint explicitly. With name=None the database generates
    # its own name (e.g. data_sources_ibfk_N on MySQL), which cannot be
    # referenced portably when downgrading.
    op.create_foreign_key(
        "fk_data_sources_llm_id", "data_sources", "llms", ["llm_id"], ["id"]
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the llm_id foreign key and column from data_sources."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Previously drop_constraint was called with None as the constraint name,
    # which Alembic rejects ("Constraint must have a name") — use the explicit
    # name assigned in upgrade().
    op.drop_constraint("fk_data_sources_llm_id", "data_sources", type_="foreignkey")
    op.drop_column("data_sources", "llm_id")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/ac6e4d58580d_.py
================================================
"""empty message
Revision ID: ac6e4d58580d
Revises: 197bc8be72d1
Create Date: 2024-08-01 16:15:59.164348
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "ac6e4d58580d"
down_revision = "197bc8be72d1"
branch_labels = None
depends_on = None
def upgrade():
    """Add chats.browser_id and make llms.provider NOT NULL.

    browser_id is a nullable varchar(50) used to associate a chat with an
    anonymous browser session (assumption from the name — verify in the chat
    model/routes).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "chats",
        sa.Column(
            "browser_id", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True
        ),
    )
    # Tighten llms.provider to NOT NULL; the enum member list itself is
    # unchanged.
    op.alter_column(
        "llms",
        "provider",
        existing_type=mysql.ENUM("OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE"),
        nullable=False,
    )
    # ### end Alembic commands ###
def downgrade():
    """Relax llms.provider back to nullable and drop chats.browser_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "llms",
        "provider",
        existing_type=mysql.ENUM("OPENAI", "GEMINI", "ANTHROPIC_VERTEX", "OPENAI_LIKE"),
        nullable=True,
    )
    op.drop_column("chats", "browser_id")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/bd17a4ebccc5_.py
================================================
"""empty message
Revision ID: bd17a4ebccc5
Revises: a8c79553c9f6
Create Date: 2024-08-08 01:20:42.069228
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
# revision identifiers, used by Alembic.
revision = "bd17a4ebccc5"
down_revision = "a8c79553c9f6"
branch_labels = None
depends_on = None
def upgrade():
    """Soft-delete for data_sources, drop documents' unique URI index, and
    attach provenance (chunk_id / document_id) to relationships.

    Dropping the unique "source_uri" index allows multiple documents to share
    the same source URI (e.g. the same URI across different data sources —
    assumption; confirm with the document ingestion code).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("data_sources", sa.Column("deleted_at", sa.DateTime(), nullable=True))
    op.drop_index("source_uri", table_name="documents")
    op.add_column(
        "relationships",
        sa.Column("chunk_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
    )
    op.add_column(
        "relationships",
        sa.Column("document_id", sa.Integer(), nullable=True),
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(); re-creating the unique index fails if duplicate
    source_uri values were inserted while the constraint was absent."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("relationships", "chunk_id")
    op.drop_column("relationships", "document_id")
    op.create_index("source_uri", "documents", ["source_uri"], unique=True)
    op.drop_column("data_sources", "deleted_at")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/c7f016a904c1_.py
================================================
"""empty message
Revision ID: c7f016a904c1
Revises: 749767db5505
Create Date: 2024-10-30 13:28:17.345385
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "c7f016a904c1"
down_revision = "749767db5505"
branch_labels = None
depends_on = None
def upgrade():
    """Add a nullable JSON 'meta' column to chat_messages."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("chat_messages", sa.Column("meta", sa.JSON(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the chat_messages.meta column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("chat_messages", "meta")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/d2ad44deab20_multiple_kb.py
================================================
"""multiple_kb
Revision ID: d2ad44deab20
Revises: c7f016a904c1
Create Date: 2024-11-15 09:51:42.493749
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from sqlalchemy import update
from sqlmodel import Session
from sqlalchemy.dialects import mysql
from app.core.config import settings
from app.core.db import engine
from app.models import EmbeddingModel
# revision identifiers, used by Alembic.
revision = "d2ad44deab20"
down_revision = "c7f016a904c1"
branch_labels = None
depends_on = None
def upgrade():
    """Introduce knowledge bases: new tables, document links, and an
    embedding-model dimension column, followed by a data migration.

    Schema changes:
    - knowledge_bases: core table with audit columns (created/updated/deleted
      by+at) and FKs to users, llms and embedding_models.
    - knowledge_base_datasources: many-to-many join between knowledge bases
      and data sources (composite primary key).
    - documents: gains knowledge_base_id plus named FKs for both
      data_source_id and knowledge_base_id.
    - embedding_models: gains NOT NULL vector_dimension (no server_default —
      MySQL backfills existing rows with the implicit default 0, which the
      data migration below then overwrites).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "knowledge_bases",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
        sa.Column("description", mysql.MEDIUMTEXT(), nullable=True),
        sa.Column("index_methods", sa.JSON(), nullable=True),
        sa.Column("llm_id", sa.Integer(), nullable=True),
        sa.Column("embedding_model_id", sa.Integer(), nullable=True),
        sa.Column("documents_total", sa.Integer(), nullable=False),
        sa.Column("data_sources_total", sa.Integer(), nullable=False),
        sa.Column("created_by", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(), server_default=sa.text("now()"), nullable=True
        ),
        sa.Column("updated_by", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column(
            "updated_at", sa.DateTime(), server_default=sa.text("now()"), nullable=True
        ),
        sa.Column("deleted_by", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["created_by"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["deleted_by"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["embedding_model_id"],
            ["embedding_models.id"],
        ),
        sa.ForeignKeyConstraint(
            ["llm_id"],
            ["llms.id"],
        ),
        sa.ForeignKeyConstraint(
            ["updated_by"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "knowledge_base_datasources",
        sa.Column("knowledge_base_id", sa.Integer(), nullable=False),
        sa.Column("data_source_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["data_source_id"],
            ["data_sources.id"],
        ),
        sa.ForeignKeyConstraint(
            ["knowledge_base_id"],
            ["knowledge_bases.id"],
        ),
        sa.PrimaryKeyConstraint("knowledge_base_id", "data_source_id"),
    )
    op.add_column(
        "documents", sa.Column("knowledge_base_id", sa.Integer(), nullable=True)
    )
    op.create_foreign_key(
        "fk_d_on_data_source_id",
        "documents",
        "data_sources",
        ["data_source_id"],
        ["id"],
    )
    op.create_foreign_key(
        "fk_d_on_knowledge_base_id",
        "documents",
        "knowledge_bases",
        ["knowledge_base_id"],
        ["id"],
    )
    op.add_column(
        "embedding_models", sa.Column("vector_dimension", sa.Integer(), nullable=False)
    )
    # ### end Alembic commands ###
    # ### Data Migration ###
    # Backfill vector_dimension for pre-existing embedding models: rows that
    # received the implicit default 0 get the deployment-wide dimension from
    # settings.EMBEDDING_DIMS. Note this opens its own Session on the app
    # engine rather than using the migration's connection.
    with Session(engine) as session:
        stmt = (
            update(EmbeddingModel)
            .where(EmbeddingModel.vector_dimension == 0)
            .values(vector_dimension=settings.EMBEDDING_DIMS)
        )
        session.exec(stmt)
        session.commit()
    # ### end Data Migration ###
def downgrade():
    """Revert upgrade(): drop the dimension column, document FKs/column, and
    the knowledge base tables (join table before its parents)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("embedding_models", "vector_dimension")
    op.drop_constraint("fk_d_on_data_source_id", "documents", type_="foreignkey")
    op.drop_constraint("fk_d_on_knowledge_base_id", "documents", type_="foreignkey")
    op.drop_column("documents", "knowledge_base_id")
    op.drop_table("knowledge_base_datasources")
    op.drop_table("knowledge_bases")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/dfee070b8abd_.py
================================================
"""empty message
Revision ID: dfee070b8abd
Revises: eb0b85608c0a
Create Date: 2024-09-10 10:45:50.318277
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "dfee070b8abd"
down_revision = "eb0b85608c0a"
branch_labels = None
depends_on = None
def upgrade():
    """Add chat_messages.graph_data, index entities by type, and relax the
    site_settings timestamp columns to nullable."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("chat_messages", sa.Column("graph_data", sa.JSON(), nullable=True))
    op.create_index("idx_entity_type", "entities", ["entity_type"], unique=False)
    # Make created_at / updated_at nullable while preserving their existing
    # microsecond-precision CURRENT_TIMESTAMP defaults.
    op.alter_column(
        "site_settings",
        "created_at",
        existing_type=mysql.DATETIME(fsp=6),
        nullable=True,
        existing_server_default=sa.text("CURRENT_TIMESTAMP(6)"),
    )
    op.alter_column(
        "site_settings",
        "updated_at",
        existing_type=mysql.DATETIME(fsp=6),
        nullable=True,
        existing_server_default=sa.text(
            "CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6)"
        ),
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): restore NOT NULL timestamps, drop the entity-type
    index and the graph_data column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "site_settings",
        "updated_at",
        existing_type=mysql.DATETIME(fsp=6),
        nullable=False,
        existing_server_default=sa.text(
            "CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6)"
        ),
    )
    op.alter_column(
        "site_settings",
        "created_at",
        existing_type=mysql.DATETIME(fsp=6),
        nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP(6)"),
    )
    op.drop_index("idx_entity_type", table_name="entities")
    op.drop_column("chat_messages", "graph_data")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/e32f1e546eec_.py
================================================
"""empty message
Revision ID: e32f1e546eec
Revises: bd17a4ebccc5
Create Date: 2024-08-08 03:55:14.042290
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from app.models.base import AESEncryptedColumn
# revision identifiers, used by Alembic.
revision = "e32f1e546eec"
down_revision = "bd17a4ebccc5"
branch_labels = None
depends_on = None
def upgrade():
    """Create the reranker_models table and link chat_engines to it via an
    optional reranker_id foreign key."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "reranker_models",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=True,
        ),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False),
        sa.Column(
            "provider",
            sa.Enum("JINA", "COHERE", name="rerankerprovider"),
            nullable=False,
        ),
        sa.Column(
            "model", sqlmodel.sql.sqltypes.AutoString(length=256), nullable=False
        ),
        sa.Column("top_n", sa.Integer(), nullable=False),
        sa.Column("config", sa.JSON(), nullable=True),
        sa.Column("is_default", sa.Boolean(), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("credentials", AESEncryptedColumn(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.add_column("chat_engines", sa.Column("reranker_id", sa.Integer(), nullable=True))
    # Name the constraint explicitly (was None): an auto-generated name cannot
    # be referenced portably when the migration is reverted.
    op.create_foreign_key(
        "fk_chat_engines_reranker_id",
        "chat_engines",
        "reranker_models",
        ["reranker_id"],
        ["id"],
    )
    # ### end Alembic commands ###
def downgrade():
    """Remove the chat_engines → reranker_models link and the table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The FK must be dropped before the column: MySQL refuses to drop a
    # column that is still part of a foreign key constraint, so the previous
    # direct drop_column would fail.
    op.drop_constraint(
        "fk_chat_engines_reranker_id", "chat_engines", type_="foreignkey"
    )
    op.drop_column("chat_engines", "reranker_id")
    op.drop_table("reranker_models")
    # ### end Alembic commands ###
================================================
FILE: backend/app/alembic/versions/eb0b85608c0a_.py
================================================
"""empty message
Revision ID: eb0b85608c0a
Revises: 00534dc350db
Create Date: 2024-08-28 15:10:04.219389
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "eb0b85608c0a"
down_revision = "00534dc350db"
branch_labels = None
depends_on = None
def upgrade():
    """Widen mime_type from varchar(64) to varchar(128) on documents and
    uploads (some MIME types exceed 64 characters)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "documents",
        "mime_type",
        existing_type=mysql.VARCHAR(length=64),
        type_=sa.String(length=128),
        existing_nullable=False,
    )
    op.alter_column(
        "uploads",
        "mime_type",
        existing_type=mysql.VARCHAR(length=64),
        type_=sa.String(length=128),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
def downgrade():
    """Shrink mime_type back to varchar(64); values longer than 64 chars
    would be truncated/rejected depending on SQL mode."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "uploads",
        "mime_type",
        existing_type=sa.String(length=128),
        type_=mysql.VARCHAR(length=64),
        existing_nullable=False,
    )
    op.alter_column(
        "documents",
        "mime_type",
        existing_type=sa.String(length=128),
        type_=mysql.VARCHAR(length=64),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
================================================
FILE: backend/app/api/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/chat/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/chat/routes.py
================================================
from typing import Optional
from fastapi import APIRouter, Depends
from fastapi_pagination import Page, Params
from app.models.chat import ChatOrigin
from app.api.deps import CurrentSuperuserDep, SessionDep
from app.repositories import chat_repo
# Router for superuser-only chat administration endpoints; all paths are
# served under /admin/chats.
router = APIRouter(
    prefix="/admin/chats",
    tags=["admin/chats"],
)
@router.get("/origins")
def list_chat_origins(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    search: Optional[str] = None,
    params: Params = Depends(),
) -> Page[ChatOrigin]:
    """Return a paginated list of chat origins, optionally narrowed by a
    search term. Requires a superuser."""
    origins_page = chat_repo.list_chat_origins(db_session, search, params)
    return origins_page
================================================
FILE: backend/app/api/admin_routes/chat_engine.py
================================================
from fastapi import APIRouter, Depends
from fastapi_pagination import Params, Page
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.exceptions import DefaultChatEngineCannotBeDeleted
from app.rag.chat.config import ChatEngineConfig
from app.repositories import chat_engine_repo
from app.models import ChatEngine, ChatEngineUpdate
# Router for superuser-only chat engine management; paths are given in full
# on each route (no prefix).
router = APIRouter()
@router.get("/admin/chat-engines")
def list_chat_engines(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    params: Params = Depends(),
) -> Page[ChatEngine]:
    """Return a page of all configured chat engines (superuser only)."""
    engines_page = chat_engine_repo.paginate(db_session, params)
    return engines_page
@router.post("/admin/chat-engines")
def create_chat_engine(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    chat_engine: ChatEngine,
) -> ChatEngine:
    """Persist a new chat engine and return the stored record."""
    created = chat_engine_repo.create(db_session, chat_engine)
    return created
@router.get("/admin/chat-engines/{chat_engine_id}")
def get_chat_engine(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    chat_engine_id: int,
) -> ChatEngine:
    """Fetch one chat engine by id via the repo's must_get (which raises if
    the id does not exist)."""
    engine = chat_engine_repo.must_get(db_session, chat_engine_id)
    return engine
@router.put("/admin/chat-engines/{chat_engine_id}")
def update_chat_engine(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    chat_engine_id: int,
    update: ChatEngineUpdate,
) -> ChatEngine:
    """Apply an update payload to an existing chat engine.

    Uses must_get first so a missing id fails before any write is attempted.
    """
    existing = chat_engine_repo.must_get(db_session, chat_engine_id)
    updated = chat_engine_repo.update(db_session, existing, update)
    return updated
@router.delete("/admin/chat-engines/{chat_engine_id}")
def delete_chat_engine(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    chat_engine_id: int,
) -> ChatEngine:
    """Delete a chat engine and return the deleted record.

    The default engine is protected: attempting to delete it raises
    DefaultChatEngineCannotBeDeleted.
    """
    engine = chat_engine_repo.must_get(db_session, chat_engine_id)
    if engine.is_default:
        raise DefaultChatEngineCannotBeDeleted(chat_engine_id)
    return chat_engine_repo.delete(db_session, engine)
@router.get("/admin/chat-engines-default-config")
def get_default_config(
    db_session: SessionDep, user: CurrentSuperuserDep
) -> ChatEngineConfig:
    """Return a freshly constructed ChatEngineConfig carrying the built-in
    defaults; the session dependency is only used for auth wiring here."""
    default_config = ChatEngineConfig()
    return default_config
================================================
FILE: backend/app/api/admin_routes/document/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/document/routes.py
================================================
from typing import Annotated
from fastapi import APIRouter, Depends, Query
from fastapi_pagination import Params, Page
from app.api.admin_routes.knowledge_base.document.models import (
DocumentFilters,
DocumentItem,
)
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.repositories import document_repo
# Router for the superuser-only cross-knowledge-base document listing.
router = APIRouter()
@router.get("/admin/documents")
def list_documents(
    session: SessionDep,
    user: CurrentSuperuserDep,
    filters: Annotated[DocumentFilters, Query()],
    params: Params = Depends(),
) -> Page[DocumentItem]:
    """Return a filtered, paginated listing of documents (superuser only).

    Filter fields arrive as query parameters bound into DocumentFilters.
    """
    return document_repo.paginate(session=session, filters=filters, params=params)
================================================
FILE: backend/app/api/admin_routes/embedding_model/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/embedding_model/models.py
================================================
from datetime import datetime
from typing import Any
from pydantic import BaseModel, field_validator
from typing_extensions import Optional
from app.rag.embeddings.provider import EmbeddingProvider
class EmbeddingModelCreate(BaseModel):
    """Payload for creating (or test-running) an embedding model config."""

    name: str
    provider: EmbeddingProvider
    model: str
    # Expected length of the vectors the model produces; validated > 0 below.
    vector_dimension: int
    config: dict | list | None
    credentials: Any
    is_default: Optional[bool] = False

    @field_validator("vector_dimension")
    def vector_dimension_must_be_positive(cls, v: int) -> int:
        """Reject non-positive dimensions.

        The check accepts any v >= 1; the previous error message (and
        validator name) claimed the value had to be "greater than 1", which
        contradicted the actual `v <= 0` check — a dimension of 1 passes.
        """
        if v <= 0:
            raise ValueError(
                "The vector dimension of the Embedding model must be greater than 0."
            )
        return v
class EmbeddingModelUpdate(BaseModel):
    """Partial-update payload: only the fields provided are changed."""

    name: Optional[str] = None
    config: Optional[dict | list] = None
    credentials: Optional[str | dict] = None
class EmbeddingModelItem(BaseModel):
    """List-view projection of an embedding model (no config/credentials)."""

    id: int
    name: str
    provider: EmbeddingProvider
    model: str
    vector_dimension: int
    is_default: bool
class EmbeddingModelDetail(BaseModel):
    """Detail view of an embedding model; exposes config and timestamps but
    never the credentials."""

    id: int
    name: str
    provider: EmbeddingProvider
    model: str
    vector_dimension: int
    config: dict | list | None
    is_default: bool
    created_at: datetime
    updated_at: datetime
class EmbeddingModelTestResult(BaseModel):
    """Outcome of a connectivity/dimension test; error is empty on success."""

    success: bool
    error: str = ""
================================================
FILE: backend/app/api/admin_routes/embedding_model/routes.py
================================================
from typing import List
from fastapi import APIRouter, Depends
from fastapi_pagination import Params, Page
from app.api.admin_routes.embedding_model.models import (
EmbeddingModelItem,
EmbeddingModelDetail,
EmbeddingModelUpdate,
EmbeddingModelTestResult,
EmbeddingModelCreate,
)
from app.api.deps import CurrentSuperuserDep, SessionDep
from app.repositories.embedding_model import embedding_model_repo
from app.rag.embeddings.provider import (
EmbeddingProviderOption,
embedding_provider_options,
)
from app.rag.embeddings.resolver import resolve_embed_model
from app.logger import logger
# Router for superuser-only embedding model management endpoints.
router = APIRouter()
@router.get("/admin/embedding-models/providers/options")
def list_embedding_model_provider_options(
    user: CurrentSuperuserDep,
) -> List[EmbeddingProviderOption]:
    """Expose the static catalogue of supported embedding providers."""
    provider_options = embedding_provider_options
    return provider_options
@router.get("/admin/embedding-models")
def list_embedding_models(
    db_session: SessionDep, user: CurrentSuperuserDep, params: Params = Depends()
) -> Page[EmbeddingModelItem]:
    """Return a page of configured embedding models (superuser only)."""
    models_page = embedding_model_repo.paginate(db_session, params)
    return models_page
@router.post("/admin/embedding-models/test")
def test_embedding_model(
    user: CurrentSuperuserDep,
    create: EmbeddingModelCreate,
) -> EmbeddingModelTestResult:
    """Dry-run an embedding model configuration without persisting it.

    Resolves the model from the given provider/credentials, embeds a probe
    string, and checks the vector length against the declared dimension.
    Any failure is captured and reported in the result rather than raised.
    """
    success, error = True, ""
    try:
        embed_model = resolve_embed_model(
            provider=create.provider,
            model=create.model,
            config=create.config,
            credentials=create.credentials,
        )
        embedding = embed_model.get_query_embedding("Hello, world!")
        expected_length = create.vector_dimension
        if len(embedding) != expected_length:
            raise ValueError(
                f"Embedding model is configured with {expected_length} dimensions, but got vector embedding with {len(embedding)} dimensions."
            )
    except Exception as e:
        # Deliberate best-effort: surface the failure in the response body.
        logger.info(f"Failed to test embedding model: {e}")
        success, error = False, str(e)
    return EmbeddingModelTestResult(success=success, error=error)
@router.post("/admin/embedding-models")
def create_embedding_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    create: EmbeddingModelCreate,
) -> EmbeddingModelDetail:
    """Persist a new embedding model config and return its detail view."""
    created = embedding_model_repo.create(db_session, create)
    return created
@router.get("/admin/embedding-models/{model_id}")
def get_embedding_model_detail(
    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int
) -> EmbeddingModelDetail:
    """Fetch one embedding model; must_get raises if the id is unknown."""
    detail = embedding_model_repo.must_get(db_session, model_id)
    return detail
@router.put("/admin/embedding-models/{model_id}")
def update_embedding_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    model_id: int,
    update: EmbeddingModelUpdate,
) -> EmbeddingModelDetail:
    """Apply a partial update to an embedding model.

    The model is fetched with must_get first, so an unknown id fails before
    any write occurs.
    """
    existing = embedding_model_repo.must_get(db_session, model_id)
    updated = embedding_model_repo.update(db_session, existing, update)
    return updated
@router.delete("/admin/embedding-models/{model_id}")
def delete_embedding_model(
    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int
) -> None:
    """Delete an embedding model by id (must_get raises if missing)."""
    target = embedding_model_repo.must_get(db_session, model_id)
    embedding_model_repo.delete(db_session, target)
@router.put("/admin/embedding-models/{model_id}/set_default")
def set_default_embedding_model(
    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int
) -> EmbeddingModelDetail:
    """Mark the given embedding model as the default and return it."""
    target = embedding_model_repo.must_get(db_session, model_id)
    return embedding_model_repo.set_default(db_session, target)
================================================
FILE: backend/app/api/admin_routes/evaluation/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/evaluation/evaluation_dataset.py
================================================
import pandas as pd
from fastapi import APIRouter, status, HTTPException, Depends
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlmodel import paginate
from sqlmodel import select, desc
from app.api.admin_routes.evaluation.models import (
CreateEvaluationDataset,
UpdateEvaluationDataset,
ModifyEvaluationDatasetItem,
ParamsWithKeyword,
)
from app.api.admin_routes.evaluation.tools import must_get
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.file_storage import default_file_storage
from app.models import Upload, EvaluationDataset, EvaluationDatasetItem
from app.types import MimeTypes
router = APIRouter()
@router.post("/admin/evaluation/datasets")
def create_evaluation_dataset(
    evaluation_dataset: CreateEvaluationDataset,
    session: SessionDep,
    user: CurrentSuperuserDep,
) -> EvaluationDataset:
    """
    Create an evaluation dataset, optionally seeded from an uploaded CSV file.

    This API depends on the /admin/uploads API to upload the evaluation data.
    The evaluation data is expected to be a CSV file with the following columns:

    - query: The query to evaluate
    - reference: The expected response to the query

    You can add more columns to the CSV file; the extra columns are stored in
    each item's `extra` field and adhere to the results.

    Args:
        evaluation_dataset.name: The name of the evaluation dataset.
        evaluation_dataset.upload_id: The ID of the uploaded CSV file of the
            evaluation dataset. When None, an empty dataset is created.

    Returns:
        The newly created EvaluationDataset row (refreshed, with generated ID).

    Raises:
        HTTPException(400): if the upload is not a CSV or lacks the required
            columns.
    """
    name = evaluation_dataset.name
    evaluation_data_list = []
    if evaluation_dataset.upload_id is not None:
        # If the evaluation_file_id is provided, validate the uploaded file
        evaluation_file_id = evaluation_dataset.upload_id
        upload = must_get(session, Upload, evaluation_file_id)
        if upload.mime_type != MimeTypes.CSV:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="The uploaded file must be a CSV file.",
            )
        with default_file_storage.open(upload.path) as f:
            df = pd.read_csv(f)
        # check essential columns
        must_have_columns = ["query", "reference"]
        if not set(must_have_columns).issubset(df.columns):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"The uploaded file must have the following columns: {must_have_columns}",
            )
        eval_list = df.to_dict(orient="records")
        # create evaluation dataset items; any non-required CSV column is kept
        # in the item's `extra` dict.
        evaluation_data_list = [
            EvaluationDatasetItem(
                query=item["query"],
                reference=item["reference"],
                retrieved_contexts=[],  # TODO: implement this after we can retrieve contexts
                extra={k: item[k] for k in item if k not in must_have_columns},
            )
            for item in eval_list
        ]
    # NOTE: rebinding the request-parameter name to the ORM object from here on.
    evaluation_dataset = EvaluationDataset(
        name=name,
        user_id=user.id,
        evaluation_data_list=evaluation_data_list,
    )
    session.add(evaluation_dataset)
    session.commit()
    session.refresh(evaluation_dataset)
    return evaluation_dataset
@router.delete("/admin/evaluation/datasets/{evaluation_dataset_id}")
def delete_evaluation_dataset(
    evaluation_dataset_id: int, session: SessionDep, user: CurrentSuperuserDep
) -> bool:
    """Delete an evaluation dataset by ID; must_get raises 404 when absent."""
    dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)
    session.delete(dataset)
    session.commit()
    return True
@router.put("/admin/evaluation/datasets/{evaluation_dataset_id}")
def update_evaluation_dataset(
    evaluation_dataset_id: int,
    updated_evaluation_dataset: UpdateEvaluationDataset,
    session: SessionDep,
    user: CurrentSuperuserDep,
) -> EvaluationDataset:
    """Rename an evaluation dataset and return the refreshed row."""
    dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)
    dataset.name = updated_evaluation_dataset.name
    session.merge(dataset)
    session.commit()
    session.refresh(dataset)
    return dataset
@router.get("/admin/evaluation/datasets")
def list_evaluation_dataset(
    session: SessionDep,
    user: CurrentSuperuserDep,
    params: ParamsWithKeyword = Depends(),
) -> Page[EvaluationDataset]:
    """List evaluation datasets, newest first, optionally filtered by name keyword."""
    query = select(EvaluationDataset).order_by(desc(EvaluationDataset.id))
    keyword = params.keyword
    if keyword:
        query = query.where(EvaluationDataset.name.ilike(f"%{keyword}%"))
    return paginate(session, query, params)
@router.post("/admin/evaluation/dataset-items")
def create_evaluation_dataset_item(
    modify_evaluation_dataset_item: ModifyEvaluationDatasetItem,
    session: SessionDep,
    user: CurrentSuperuserDep,
) -> EvaluationDatasetItem:
    """Add a single item to an existing evaluation dataset."""
    payload = modify_evaluation_dataset_item
    new_item = EvaluationDatasetItem(
        query=payload.query,
        reference=payload.reference,
        retrieved_contexts=payload.retrieved_contexts,
        extra=payload.extra,
        evaluation_dataset_id=payload.evaluation_dataset_id,
    )
    session.add(new_item)
    session.commit()
    session.refresh(new_item)
    return new_item
@router.delete("/admin/evaluation/dataset-items/{evaluation_dataset_item_id}")
def delete_evaluation_dataset_item(
    evaluation_dataset_item_id: int, session: SessionDep, user: CurrentSuperuserDep
) -> bool:
    """Remove a single dataset item; must_get raises 404 when absent."""
    item = must_get(session, EvaluationDatasetItem, evaluation_dataset_item_id)
    session.delete(item)
    session.commit()
    return True
@router.put("/admin/evaluation/dataset-items/{evaluation_dataset_item_id}")
def update_evaluation_dataset_item(
    evaluation_dataset_item_id: int,
    updated_evaluation_dataset_item: ModifyEvaluationDatasetItem,
    session: SessionDep,
    user: CurrentSuperuserDep,
) -> EvaluationDatasetItem:
    """Overwrite every mutable field of a dataset item from the request payload."""
    payload = updated_evaluation_dataset_item
    item = must_get(session, EvaluationDatasetItem, evaluation_dataset_item_id)
    item.query = payload.query
    item.reference = payload.reference
    item.retrieved_contexts = payload.retrieved_contexts
    item.extra = payload.extra
    item.evaluation_dataset_id = payload.evaluation_dataset_id
    session.merge(item)
    session.commit()
    session.refresh(item)
    return item
@router.get("/admin/evaluation/datasets/{evaluation_dataset_id}/dataset-items")
def list_evaluation_dataset_item(
    session: SessionDep,
    user: CurrentSuperuserDep,
    evaluation_dataset_id: int,
    params: ParamsWithKeyword = Depends(),
) -> Page[EvaluationDatasetItem]:
    """Paginate the items of one dataset, optionally filtered by query keyword."""
    items_stmt = (
        select(EvaluationDatasetItem)
        .where(EvaluationDatasetItem.evaluation_dataset_id == evaluation_dataset_id)
        .order_by(EvaluationDatasetItem.id)
    )
    keyword = params.keyword
    if keyword:
        items_stmt = items_stmt.where(
            EvaluationDatasetItem.query.ilike(f"%{keyword}%")
        )
    return paginate(session, items_stmt, params)
@router.get("/admin/evaluation/dataset-items/{evaluation_dataset_item_id}")
def get_evaluation_dataset_item(
    session: SessionDep,
    user: CurrentSuperuserDep,
    evaluation_dataset_item_id: int,
) -> EvaluationDatasetItem:
    """Fetch one dataset item by ID; must_get raises 404 when absent."""
    item = must_get(session, EvaluationDatasetItem, evaluation_dataset_item_id)
    return item
================================================
FILE: backend/app/api/admin_routes/evaluation/evaluation_task.py
================================================
import logging
from typing import Optional, List
import sqlmodel
from fastapi import APIRouter, Depends
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlmodel import paginate
from sqlalchemy import func, update
from sqlalchemy.orm import Session
from sqlmodel import select, case, desc
from app.api.admin_routes.evaluation.models import (
CreateEvaluationTask,
EvaluationTaskSummary,
ParamsWithKeyword,
EvaluationTaskOverview,
)
from app.api.admin_routes.evaluation.tools import must_get
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.models import (
EvaluationTask,
EvaluationTaskItem,
EvaluationStatus,
EvaluationDataset,
)
from app.tasks.evaluate import add_evaluation_task
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/admin/evaluation/tasks")
def create_evaluation_task(
    evaluation_task: CreateEvaluationTask,
    session: SessionDep,
    user: CurrentSuperuserDep,
) -> Optional[EvaluationTask]:
    """
    Create an evaluation task from an evaluation dataset and schedule it for
    asynchronous execution.

    Args:
        evaluation_task.name: The name of the evaluation task.
        evaluation_task.evaluation_dataset_id: The ID of the uploaded evaluation dataset.
        evaluation_task.chat_engine: The chat engine to evaluate the queries against. Default is "default".
        evaluation_task.run_size: The number of queries to evaluate. Default is None, which means all items in the dataset.

    Returns:
        The newly created EvaluationTask (refreshed, with generated ID).
    """
    name = evaluation_task.name
    evaluation_dataset_id = evaluation_task.evaluation_dataset_id
    chat_engine = evaluation_task.chat_engine
    run_size = evaluation_task.run_size

    dataset = must_get(session, EvaluationDataset, evaluation_dataset_id)

    # Bug fix: the previous code truncated `dataset.evaluation_data_list` in
    # place (despite a comment claiming a deep copy), which mutated the loaded
    # ORM relationship and could be flushed by the commit below, silently
    # detaching items from the source dataset. Slice into a local list instead.
    dataset_items = dataset.evaluation_data_list
    if run_size is not None and run_size < len(dataset_items):
        dataset_items = dataset_items[:run_size]

    # Copy each dataset item into an independent task item so that later edits
    # to the dataset do not affect this task run.
    evaluation_task_items = [
        EvaluationTaskItem(
            status=EvaluationStatus.NOT_START,
            chat_engine=chat_engine,
            query=item.query,
            reference=item.reference,
            retrieved_contexts=item.retrieved_contexts,
            extra=item.extra,
        )
        for item in dataset_items
    ]
    evaluation_task = EvaluationTask(
        name=name,
        user_id=user.id,
        evaluation_task_items=evaluation_task_items,
        dataset_id=evaluation_dataset_id,
    )
    session.add(evaluation_task)
    session.commit()
    session.refresh(evaluation_task)

    # Kick off the actual evaluation in a background worker.
    add_evaluation_task.delay(evaluation_task.id)

    return evaluation_task
@router.delete("/admin/evaluation/tasks/{evaluation_task_id}")
def cancel_evaluation_task(
    evaluation_task_id: int, session: SessionDep, user: CurrentSuperuserDep
) -> Optional[bool]:
    """Mark every item of the task as CANCEL; must_get raises 404 when absent."""
    must_get(session, EvaluationTask, evaluation_task_id)
    cancel_stmt = (
        update(EvaluationTaskItem)
        .where(EvaluationTaskItem.evaluation_task_id == evaluation_task_id)
        .values(status=EvaluationStatus.CANCEL)
    )
    session.exec(cancel_stmt)
    session.commit()
    return True
@router.get("/admin/evaluation/tasks/{evaluation_task_id}")
def get_evaluation_task(
    session: SessionDep,
    user: CurrentSuperuserDep,
    evaluation_task_id: int,
) -> EvaluationTask:
    """Fetch one evaluation task by ID; must_get raises 404 when absent."""
    task = must_get(session, EvaluationTask, evaluation_task_id)
    return task
@router.get("/admin/evaluation/tasks/{evaluation_task_id}/summary")
def get_evaluation_task_summary(
    evaluation_task_id: int, session: SessionDep, user: CurrentSuperuserDep
) -> EvaluationTaskSummary:
    """Return the task enriched with per-status counts and score statistics."""
    evaluation_task = must_get(session, EvaluationTask, evaluation_task_id)
    return get_summary_for_evaluation_task(evaluation_task, session)
@router.get("/admin/evaluation/tasks")
def list_evaluation_task(
    session: SessionDep,
    user: CurrentSuperuserDep,
    params: ParamsWithKeyword = Depends(),
) -> Page[EvaluationTaskSummary]:
    """List evaluation tasks (newest first), each with its status summary."""
    query = select(EvaluationTask).order_by(desc(EvaluationTask.id))
    if params.keyword:
        query = query.where(EvaluationTask.name.ilike(f"%{params.keyword}%"))
    task_page: Page[EvaluationTask] = paginate(session, query, params)
    # Enrich every task on the current page with aggregated item counts/metrics.
    summaries: List[EvaluationTaskSummary] = [
        get_summary_for_evaluation_task(task, session) for task in task_page.items
    ]
    return Page[EvaluationTaskSummary](
        items=summaries,
        total=task_page.total,
        page=task_page.page,
        size=task_page.size,
        pages=task_page.pages,
    )
@router.get("/admin/evaluation/tasks/{evaluation_task_id}/items")
def list_evaluation_task_items(
    evaluation_task_id: int,
    session: SessionDep,
    user: CurrentSuperuserDep,
    params: ParamsWithKeyword = Depends(),
) -> Page[EvaluationTaskItem]:
    """
    Paginate the items of an evaluation task, optionally filtered by a keyword
    matched against either the item's query or its reference text.

    must_get raises a 404 when the task does not exist.
    """
    must_get(session, EvaluationTask, evaluation_task_id)
    stmt = select(EvaluationTaskItem).where(
        EvaluationTaskItem.evaluation_task_id == evaluation_task_id
    )
    if params.keyword:
        stmt = stmt.where(
            sqlmodel.or_(
                EvaluationTaskItem.query.ilike(f"%{params.keyword}%"),
                EvaluationTaskItem.reference.ilike(f"%{params.keyword}%"),
            )
        )
    # Bug fix: `order_by` returns a new statement; the previous code discarded
    # its result, so items were returned in unspecified order.
    stmt = stmt.order_by(EvaluationTaskItem.id)
    return paginate(session, stmt, params)
def get_summary_for_evaluation_task(
    evaluation_task: EvaluationTask, session: Session
) -> EvaluationTaskSummary:
    """
    Build an EvaluationTaskSummary for one task: per-status item counts plus,
    once no item is pending or running, aggregate statistics (avg/min/max/std)
    of the factual-correctness and semantic-similarity scores.

    Args:
        evaluation_task: The task whose items are aggregated.
        session: An open database session used for the aggregate queries.

    Returns:
        An EvaluationTaskSummary combining the task's own fields with the
        computed EvaluationTaskOverview.
    """
    # Count items per status in a single query using COUNT(CASE ...): each CASE
    # yields 1 for a matching row and NULL otherwise, and COUNT ignores NULLs.
    status_counts = (
        session.query(
            func.count(
                case(
                    (EvaluationTaskItem.status == EvaluationStatus.NOT_START, 1),
                    else_=None,
                )
            ).label("not_start"),
            func.count(
                case(
                    (EvaluationTaskItem.status == EvaluationStatus.EVALUATING, 1),
                    else_=None,
                )
            ).label("evaluating"),
            func.count(
                case(
                    (EvaluationTaskItem.status == EvaluationStatus.DONE, 1), else_=None
                )
            ).label("done"),
            func.count(
                case(
                    (EvaluationTaskItem.status == EvaluationStatus.ERROR, 1), else_=None
                )
            ).label("error"),
            func.count(
                case(
                    (EvaluationTaskItem.status == EvaluationStatus.CANCEL, 1),
                    else_=None,
                )
            ).label("cancel"),
        )
        .filter(EvaluationTaskItem.evaluation_task_id == evaluation_task.id)
        .one()
    )
    stats = {}
    # Score statistics are only meaningful once no item is still pending or
    # running; until then `stats` stays empty and the overview falls back to 0.
    if status_counts.not_start == 0 and status_counts.evaluating == 0:
        stats_tuple = (
            session.query(
                func.avg(EvaluationTaskItem.factual_correctness).label(
                    "avg_factual_correctness"
                ),
                func.avg(EvaluationTaskItem.semantic_similarity).label(
                    "avg_semantic_similarity"
                ),
                func.min(EvaluationTaskItem.factual_correctness).label(
                    "min_factual_correctness"
                ),
                func.min(EvaluationTaskItem.semantic_similarity).label(
                    "min_semantic_similarity"
                ),
                func.max(EvaluationTaskItem.factual_correctness).label(
                    "max_factual_correctness"
                ),
                func.max(EvaluationTaskItem.semantic_similarity).label(
                    "max_semantic_similarity"
                ),
                func.stddev(EvaluationTaskItem.factual_correctness).label(
                    "std_factual_correctness"
                ),
                func.stddev(EvaluationTaskItem.semantic_similarity).label(
                    "std_semantic_similarity"
                ),
            )
            # Only completed items with both scores present contribute.
            .filter(
                EvaluationTaskItem.evaluation_task_id == evaluation_task.id,
                EvaluationTaskItem.status == EvaluationStatus.DONE,
                EvaluationTaskItem.factual_correctness.isnot(None),
                EvaluationTaskItem.semantic_similarity.isnot(None),
            )
            .one()
        )
        # Row -> plain dict keyed by the labels above.
        stats = dict(stats_tuple._mapping)
        logger.info(stats)
    return EvaluationTaskSummary(
        summary=EvaluationTaskOverview(
            not_start=status_counts.not_start,
            succeed=status_counts.done,
            errored=status_counts.error,
            progressing=status_counts.evaluating,
            cancel=status_counts.cancel,
            avg_factual_correctness=stats.get("avg_factual_correctness", 0),
            avg_semantic_similarity=stats.get("avg_semantic_similarity", 0),
            min_factual_correctness=stats.get("min_factual_correctness", 0),
            min_semantic_similarity=stats.get("min_semantic_similarity", 0),
            max_factual_correctness=stats.get("max_factual_correctness", 0),
            max_semantic_similarity=stats.get("max_semantic_similarity", 0),
            std_factual_correctness=stats.get("std_factual_correctness", 0),
            std_semantic_similarity=stats.get("std_semantic_similarity", 0),
        ),
        **evaluation_task.model_dump(),
    )
================================================
FILE: backend/app/api/admin_routes/evaluation/models.py
================================================
from typing import Optional
from uuid import UUID
from datetime import datetime
from fastapi_pagination import Params
from pydantic import BaseModel
class CreateEvaluationTask(BaseModel):
    """Request body for creating an evaluation task from an existing dataset."""

    name: str
    evaluation_dataset_id: int
    chat_engine: str = "default"  # name of the chat engine to evaluate against
    run_size: Optional[int] = None  # None means evaluate every dataset item
class EvaluationTaskOverview(BaseModel):
    """Aggregated item counts and score statistics for one evaluation task."""

    # Item counts per status.
    not_start: int
    succeed: int
    errored: int
    progressing: int
    cancel: int
    # Score statistics over completed items (may be None/0 until evaluation finishes).
    avg_factual_correctness: Optional[float]
    avg_semantic_similarity: Optional[float]
    min_factual_correctness: Optional[float]
    min_semantic_similarity: Optional[float]
    max_factual_correctness: Optional[float]
    max_semantic_similarity: Optional[float]
    std_factual_correctness: Optional[float]
    std_semantic_similarity: Optional[float]
class EvaluationTaskSummary(BaseModel):
    """An evaluation task's own fields plus its aggregated overview."""

    id: Optional[int]
    name: str
    user_id: UUID
    dataset_id: int
    created_at: Optional[datetime]
    updated_at: Optional[datetime]
    summary: EvaluationTaskOverview
class UpdateEvaluationDataset(BaseModel):
    """Request body for renaming an evaluation dataset."""

    name: str
class CreateEvaluationDataset(BaseModel):
    """Request body for creating an evaluation dataset."""

    name: str
    upload_id: Optional[int] = None  # ID of an uploaded CSV; None creates an empty dataset
class ModifyEvaluationDatasetItem(BaseModel):
    """Request body for creating or fully updating one dataset item."""

    query: str
    reference: str  # expected response to the query
    retrieved_contexts: list[str]
    extra: dict  # arbitrary additional columns carried through evaluation
    evaluation_dataset_id: int
class ParamsWithKeyword(Params):
    """Pagination params extended with an optional keyword filter."""

    keyword: Optional[str] = None
================================================
FILE: backend/app/api/admin_routes/evaluation/tools.py
================================================
from typing import TypeVar, Type
from fastapi import status, HTTPException
from sqlmodel import SQLModel, Session
T = TypeVar("T", bound=SQLModel)
def must_get(session: Session, model: Type[T], item_id: int) -> T:
    """Fetch a row of `model` by primary key or raise a 404 HTTPException."""
    found = session.get(model, item_id)
    if not found:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"{model.__name__} with ID {item_id} not found",
        )
    return found
def must_get_and_belong(
    session: Session, model: Type[T], item_id: int, user_id: int
) -> T:
    """Fetch a row by primary key and verify it is owned by `user_id`.

    Raises 404 if the row is missing, 400 if the model lacks a user_id field,
    and 403 if the row belongs to a different user.
    """
    record = must_get(session, model, item_id)
    if not hasattr(record, "user_id"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"{model.__name__} does not have a 'user_id' field",
        )
    if record.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=f"{model.__name__} with ID {item_id} does not belong to user {user_id}",
        )
    return record
================================================
FILE: backend/app/api/admin_routes/feedback.py
================================================
from typing import Annotated, Optional
from fastapi import APIRouter, Depends, Query
from fastapi_pagination import Params, Page
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.models import AdminFeedbackPublic, FeedbackFilters
from app.models.feedback import FeedbackOrigin
from app.repositories import feedback_repo
router = APIRouter(
prefix="/admin/feedbacks",
tags=["admin/feedback"],
)
@router.get("/")
def list_feedbacks(
    session: SessionDep,
    user: CurrentSuperuserDep,
    filters: Annotated[FeedbackFilters, Query()],
    params: Params = Depends(),
) -> Page[AdminFeedbackPublic]:
    """Return a filtered, paginated list of user feedback for admins."""
    page = feedback_repo.paginate(session=session, filters=filters, params=params)
    return page
@router.get("/origins")
def list_feedback_origins(
    session: SessionDep,
    user: CurrentSuperuserDep,
    search: Optional[str] = None,
    params: Params = Depends(),
) -> Page[FeedbackOrigin]:
    """Paginate the distinct origins feedback came from, optionally searched."""
    origins = feedback_repo.list_feedback_origins(session, search, params)
    return origins
================================================
FILE: backend/app/api/admin_routes/knowledge_base/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/knowledge_base/chunk/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/knowledge_base/chunk/models.py
================================================
from pydantic import BaseModel
from app.rag.retrievers.chunk.schema import VectorSearchRetrieverConfig
class KBChunkRetrievalConfig(BaseModel):
    """Retrieval configuration used when searching a knowledge base's chunks."""

    vector_search: VectorSearchRetrieverConfig
    # TODO: add fulltext and knowledge graph search config
class KBRetrieveChunksRequest(BaseModel):
    """Request body for the chunk-retrieval admin endpoint."""

    query: str
    retrieval_config: KBChunkRetrievalConfig
================================================
FILE: backend/app/api/admin_routes/knowledge_base/chunk/routes.py
================================================
import logging
from fastapi import APIRouter
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.rag.retrievers.chunk.simple_retriever import (
ChunkSimpleRetriever,
)
from app.rag.retrievers.chunk.schema import ChunksRetrievalResult
from app.exceptions import InternalServerError, KBNotFound
from .models import KBRetrieveChunksRequest
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/admin/knowledge_base/{kb_id}/chunks/retrieve")
def retrieve_chunks(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    request: KBRetrieveChunksRequest,
) -> ChunksRetrievalResult:
    """Run a vector-search retrieval against one knowledge base's chunks."""
    try:
        retriever = ChunkSimpleRetriever(
            db_session=db_session,
            knowledge_base_id=kb_id,
            config=request.retrieval_config.vector_search,
        )
        return retriever.retrieve_chunks(request.query)
    except KBNotFound as e:
        # Let the knowledge-base-not-found error propagate unchanged.
        raise e
    except Exception as e:
        logger.exception(e)
        raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/knowledge_base/data_source/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/knowledge_base/data_source/models.py
================================================
from pydantic import BaseModel, field_validator
from app.models import DataSourceType
class KBDataSource(BaseModel):
    """
    Represents a linked data source for a knowledge base.
    """

    id: int
    name: str
    data_source_type: DataSourceType
    config: dict | list  # type-specific connection/import settings
class KBDataSourceMutable(BaseModel):
    """Fields of a data source that may be changed after creation."""

    name: str

    @field_validator("name")
    def name_must_not_be_blank(cls, v: str) -> str:
        # Reject names that are empty or whitespace-only.
        if not v.strip():
            raise ValueError("Please provide a name for the data source")
        return v
class KBDataSourceCreate(KBDataSourceMutable):
    """Request body for linking a new data source to a knowledge base."""

    data_source_type: DataSourceType
    config: dict | list  # type-specific connection/import settings
class KBDataSourceUpdate(KBDataSourceMutable):
    """Request body for updating a data source; currently only the name."""

    pass
================================================
FILE: backend/app/api/admin_routes/knowledge_base/data_source/routes.py
================================================
import logging
from fastapi import APIRouter, Depends
from fastapi_pagination import Params, Page
from app.api.admin_routes.knowledge_base.data_source.models import (
KBDataSourceUpdate,
KBDataSource,
)
from app.api.admin_routes.knowledge_base.models import KBDataSourceCreate
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.exceptions import InternalServerError, KBDataSourceNotFound, KBNotFound
from app.models import DataSource
from app.repositories import knowledge_base_repo
from app.tasks.knowledge_base import (
import_documents_from_kb_datasource,
purge_kb_datasource_related_resources,
)
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/admin/knowledge_bases/{kb_id}/datasources")
def create_kb_datasource(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    create: KBDataSourceCreate,
) -> KBDataSource:
    """Attach a new data source to a knowledge base and start importing it."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        data_source = DataSource(
            name=create.name,
            description="",
            data_source_type=create.data_source_type,
            config=create.config,
        )
        data_source = knowledge_base_repo.add_kb_datasource(session, kb, data_source)
        # Document import runs asynchronously; the request returns immediately.
        import_documents_from_kb_datasource.delay(kb_id, data_source.id)
        return data_source
    except KBNotFound as e:
        raise e
    except Exception as e:
        logger.error(
            f"Failed to create data source for knowledge base #{kb_id}: {e}", exc_info=e
        )
        raise InternalServerError()
@router.put("/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}")
def update_kb_datasource(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    data_source_id: int,
    update: KBDataSourceUpdate,
) -> KBDataSource:
    """Rename a data source that belongs to the given knowledge base."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        data_source = kb.must_get_data_source_by_id(data_source_id)
        data_source.name = update.name
        session.add(data_source)
        session.commit()
        session.refresh(data_source)
        return data_source
    except (KBNotFound, KBDataSourceNotFound):
        # Domain not-found errors propagate unchanged.
        raise
    except Exception as e:
        logger.error(f"Failed to update data source #{data_source_id}: {e}", exc_info=e)
        raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}")
def get_kb_datasource(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    data_source_id: int,
) -> KBDataSource:
    """Fetch one data source that belongs to the given knowledge base."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        return kb.must_get_data_source_by_id(data_source_id)
    except (KBNotFound, KBDataSourceNotFound):
        # Domain not-found errors propagate unchanged.
        raise
    except Exception as e:
        logger.error(f"Failed to get data source #{data_source_id}: {e}", exc_info=e)
        raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/datasources")
def list_kb_datasources(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    params: Params = Depends(),
) -> Page[KBDataSource]:
    """Paginate the data sources linked to one knowledge base."""
    page = knowledge_base_repo.list_kb_datasources(session, kb_id, params)
    return page
@router.delete("/admin/knowledge_bases/{kb_id}/datasources/{data_source_id}")
def remove_kb_datasource(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    data_source_id: int,
):
    """Unlink a data source from a knowledge base; a background job purges its data."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        data_source = kb.must_get_data_source_by_id(data_source_id)
        # Flag the data source to be deleted, it will be deleted completely by the background job.
        knowledge_base_repo.remove_kb_datasource(session, kb, data_source)
        session.commit()
        # Schedule the purge job with a short countdown after the commit.
        purge_kb_datasource_related_resources.apply_async(
            args=[kb_id, data_source_id], countdown=5
        )
        return {"detail": "success"}
    except (KBNotFound, KBDataSourceNotFound):
        # Domain not-found errors propagate unchanged.
        raise
    except Exception as e:
        logger.error(
            f"Failed to remove data source #{data_source_id} from knowledge base #{kb_id}: {e}",
            exc_info=e,
        )
        raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/knowledge_base/document/models.py
================================================
from datetime import datetime
from typing import Optional
from uuid import UUID
from pydantic import BaseModel, Field
from app.api.admin_routes.models import DataSourceDescriptor, KnowledgeBaseDescriptor
from app.models import DocIndexTaskStatus
from app.types import MimeTypes
class DocumentFilters(BaseModel):
    """Query-string filters for listing documents; every field is optional."""

    search: Optional[str] = Field(
        description="The search string to filter documents by name or source URI.",
        default=None,
    )
    knowledge_base_id: Optional[int] = Field(
        description="The knowledge base ID that the document belongs to.",
        default=None,
    )
    data_source_id: Optional[int] = Field(
        description="The data source ID that the document belongs to.",
        default=None,
    )
    mime_type: Optional[MimeTypes] = Field(
        description="The MIME type of the documents to filter by.",
        default=None,
    )
    index_status: Optional[DocIndexTaskStatus] = Field(
        description="The status of the document index task to filter by.",
        default=None,
    )
    created_at: Optional[tuple[datetime, datetime]] = Field(
        description="The time range when the document was created.",
        default=None,
    )
    updated_at: Optional[tuple[datetime, datetime]] = Field(
        description="The time range when the document was last updated.",
        default=None,
    )
    last_modified_at: Optional[tuple[datetime, datetime]] = Field(
        description="The time range when the document was last modified in the source system.",
        default=None,
    )
class DocumentItem(BaseModel):
    """A document row as returned by the admin document-listing endpoint."""

    id: int
    hash: str
    name: str
    content: str
    mime_type: MimeTypes | None
    source_uri: str | None
    meta: dict | list | None
    index_status: DocIndexTaskStatus | None
    index_result: str | None
    data_source: DataSourceDescriptor | None
    knowledge_base: KnowledgeBaseDescriptor | None
    last_modified_at: datetime
    created_at: datetime
    updated_at: datetime
class RebuildIndexResult(BaseModel):
    """Outcome of a reindex request: which documents/chunks were re-queued vs skipped."""

    reindex_document_ids: list[int] = Field(default_factory=list)
    ignore_document_ids: list[int] = Field(default_factory=list)
    reindex_chunk_ids: list[UUID] = Field(default_factory=list)
    ignore_chunk_ids: list[UUID] = Field(default_factory=list)
================================================
FILE: backend/app/api/admin_routes/knowledge_base/document/routes.py
================================================
import logging
from typing import Annotated
from fastapi import APIRouter, Depends, Query, HTTPException
from fastapi_pagination import Params, Page
from sqlmodel import Session
from app.api.admin_routes.knowledge_base.models import ChunkItem
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.models import Document
from app.models.chunk import KgIndexStatus, get_kb_chunk_model
from app.models.document import DocIndexTaskStatus
from app.models.entity import get_kb_entity_model
from app.models.relationship import get_kb_relationship_model
from app.repositories import knowledge_base_repo, document_repo
from app.repositories.chunk import ChunkRepo
from app.api.admin_routes.knowledge_base.document.models import (
DocumentFilters,
DocumentItem,
RebuildIndexResult,
)
from app.exceptions import InternalServerError
from app.repositories.graph import GraphRepo
from app.tasks.build_index import build_index_for_document, build_kg_index_for_chunk
from app.tasks.knowledge_base import stats_for_knowledge_base
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/admin/knowledge_bases/{kb_id}/documents")
def list_kb_documents(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    filters: Annotated[DocumentFilters, Query()],
    params: Params = Depends(),
) -> Page[DocumentItem]:
    """Paginate the documents of a knowledge base with the given filters applied."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        # Force the filter scope to this knowledge base regardless of client input.
        filters.knowledge_base_id = kb.id
        return document_repo.paginate(session=session, filters=filters, params=params)
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(e)
        raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/documents/{doc_id}")
def get_kb_document_by_id(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    doc_id: int,
) -> Document:
    """
    Fetch a single document and verify it belongs to the given knowledge base.

    Returns 404 when the document does not exist or belongs to another
    knowledge base.
    """
    try:
        document = document_repo.must_get(session, doc_id)
        # Bug fix: this check was previously an `assert`, which is stripped
        # under `python -O` and surfaced as a 500 (via the generic handler)
        # instead of a 404 when it failed.
        if document.knowledge_base_id != kb_id:
            raise HTTPException(
                status_code=404,
                detail=f"Document #{doc_id} is not in knowledge base #{kb_id}",
            )
        return document
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(e)
        raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/documents/{doc_id}/chunks")
def list_kb_document_chunks(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    doc_id: int,
) -> list[ChunkItem]:
    """List every chunk derived from one document of a knowledge base."""
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        # Chunks live in a per-knowledge-base table; resolve its model first.
        repo = ChunkRepo(get_kb_chunk_model(kb))
        return repo.get_document_chunks(session, doc_id)
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(e)
        raise InternalServerError()
@router.delete("/admin/knowledge_bases/{kb_id}/documents/{document_id}")
def remove_kb_document(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    document_id: int,
) -> dict:
    """
    Delete a document plus its derived chunks and knowledge-graph data, then
    trigger a background stats refresh for the knowledge base.

    Returns {"detail": "success"} on completion. (The previous
    `-> RebuildIndexResult` annotation did not match the returned payload and
    caused FastAPI to serialize an empty RebuildIndexResult instead.)
    """
    try:
        kb = knowledge_base_repo.must_get(session, kb_id)
        doc = document_repo.must_get(session, document_id)
        # Bug fix: was an `assert`, which is stripped under `python -O` and
        # surfaced as a 500; report a 404 explicitly instead.
        if doc.knowledge_base_id != kb.id:
            raise HTTPException(
                status_code=404,
                detail=f"Document #{document_id} is not in knowledge base #{kb_id}",
            )

        chunk_model = get_kb_chunk_model(kb)
        entity_model = get_kb_entity_model(kb)
        relationship_model = get_kb_relationship_model(kb)
        chunk_repo = ChunkRepo(chunk_model)
        graph_repo = GraphRepo(entity_model, relationship_model, chunk_model)

        # Remove derived data in dependency order: relationships, orphaned
        # entities, chunks, then the document row itself.
        graph_repo.delete_document_relationships(session, document_id)
        logger.info(
            f"Deleted relationships generated by document #{document_id} successfully."
        )

        graph_repo.delete_orphaned_entities(session)
        logger.info("Deleted orphaned entities successfully.")

        chunk_repo.delete_by_document(session, document_id)
        logger.info(f"Deleted chunks of document #{document_id} successfully.")

        session.delete(doc)
        session.commit()

        # Recompute knowledge-base statistics in a background worker.
        stats_for_knowledge_base.delay(kb_id)
        return {"detail": "success"}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"Failed to remove document #{document_id}: {e}")
        raise InternalServerError()
@router.post("/admin/knowledge_bases/{kb_id}/documents/reindex")
def rebuild_kb_documents_index(
    session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    document_ids: list[int],
    reindex_completed_task: bool = False,
):
    """
    Re-queue index builds for a batch of documents in a knowledge base.

    Args:
        document_ids: IDs of the documents to reindex.
        reindex_completed_task: When True, also re-queue documents/chunks whose
            previous index tasks already completed; otherwise only failed ones.
    """
    try:
        return rebuild_kb_document_index_by_ids(
            session, kb_id, document_ids, reindex_completed_task
        )
    except HTTPException:
        raise
    except Exception as e:
        # logger.exception already records the traceback; the previous explicit
        # exc_info=True was redundant.
        logger.exception(e)
        raise InternalServerError()
@router.post("/admin/knowledge_bases/{kb_id}/documents/{doc_id}/reindex")
def rebuild_kb_document_index(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    kb_id: int,
    doc_id: int,
    reindex_completed_task: bool = False,
) -> RebuildIndexResult:
    """
    Re-queue index builds for a single document (thin wrapper over the batch
    helper with a one-element ID list).

    Args:
        doc_id: ID of the document to reindex.
        reindex_completed_task: When True, also re-queue completed index tasks;
            otherwise only failed ones.
    """
    try:
        document_ids = [doc_id]
        return rebuild_kb_document_index_by_ids(
            db_session, kb_id, document_ids, reindex_completed_task
        )
    except HTTPException:
        raise
    except Exception as e:
        # logger.exception already records the traceback; the previous explicit
        # exc_info=True was redundant.
        logger.exception(e)
        raise InternalServerError()
def rebuild_kb_document_index_by_ids(
    db_session: Session,
    kb_id: int,
    document_ids: list[int],
    reindex_completed_task: bool = False,
) -> RebuildIndexResult:
    """Re-dispatch index build tasks for the given documents of a knowledge base.

    By default only items whose previous index task FAILED are retried; with
    ``reindex_completed_task=True`` non-failed items are reindexed as well.

    Args:
        db_session: Active database session.
        kb_id: ID of the knowledge base the documents belong to.
        document_ids: IDs of the documents to (re)index.
        reindex_completed_task: Also reindex non-failed documents/chunks.

    Returns:
        RebuildIndexResult listing triggered vs. skipped document/chunk IDs.
    """
    kb = knowledge_base_repo.must_get(db_session, kb_id)
    kb_chunk_repo = ChunkRepo(get_kb_chunk_model(kb))

    # Retry failed vector index tasks.
    documents = document_repo.fetch_by_ids(db_session, document_ids)
    reindex_document_ids = []
    ignore_document_ids = []
    for doc in documents:
        # TODO: check NOT_STARTED, PENDING, RUNNING
        if doc.index_status != DocIndexTaskStatus.FAILED and not reindex_completed_task:
            ignore_document_ids.append(doc.id)
            continue
        reindex_document_ids.append(doc.id)
        doc.index_status = DocIndexTaskStatus.PENDING
        db_session.add(doc)
        # Commit before dispatching so the worker observes the PENDING status.
        db_session.commit()
        build_index_for_document.delay(kb.id, doc.id)

    # Retry failed kg index tasks.
    chunks = kb_chunk_repo.fetch_by_document_ids(db_session, document_ids)
    reindex_chunk_ids = []
    ignore_chunk_ids = []
    for chunk in chunks:
        if chunk.index_status == KgIndexStatus.COMPLETED and not reindex_completed_task:
            ignore_chunk_ids.append(chunk.id)
            # The `continue` made the original `else:` branch redundant; flattened.
            continue
        reindex_chunk_ids.append(chunk.id)
        chunk.index_status = KgIndexStatus.PENDING
        db_session.add(chunk)
        db_session.commit()
        build_kg_index_for_chunk.delay(kb.id, chunk.id)

    return RebuildIndexResult(
        reindex_document_ids=reindex_document_ids,
        ignore_document_ids=ignore_document_ids,
        reindex_chunk_ids=reindex_chunk_ids,
        ignore_chunk_ids=ignore_chunk_ids,
    )
================================================
FILE: backend/app/api/admin_routes/knowledge_base/graph/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/knowledge_base/graph/knowledge/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/knowledge_base/graph/knowledge/routes.py
================================================
from fastapi import HTTPException
from starlette import status
from app.api.admin_routes.knowledge_base.graph.models import (
KnowledgeRequest,
KnowledgeNeighborRequest,
KnowledgeChunkRequest,
)
from app.api.admin_routes.knowledge_base.graph.routes import router, logger
from app.api.deps import SessionDep
from app.exceptions import KBNotFound, InternalServerError
from app.rag.knowledge_base.index_store import get_kb_tidb_graph_store
from app.repositories import knowledge_base_repo
# Experimental interface
@router.post("/admin/knowledge_bases/{kb_id}/graph/knowledge")
def retrieve_knowledge(session: SessionDep, kb_id: int, request: KnowledgeRequest):
    """Retrieve graph entities and relationships relevant to the query text."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        store = get_kb_tidb_graph_store(session, knowledge_base)
        graph_data = store.retrieve_graph_data(
            request.query,
            request.top_k,
            request.similarity_threshold,
        )
        return {
            "entities": graph_data["entities"],
            "relationships": graph_data["relationships"],
        }
    except KBNotFound as e:
        raise e
    except Exception as e:
        logger.exception(e)
        raise InternalServerError()
@router.post("/admin/knowledge_bases/{kb_id}/graph/knowledge/neighbors")
def retrieve_knowledge_neighbors(
session: SessionDep, kb_id: int, request: KnowledgeNeighborRequest
):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
graph_store = get_kb_tidb_graph_store(session, kb)
data = graph_store.retrieve_neighbors(
request.entities_ids,
request.query,
request.max_depth,
request.max_neighbors,
request.similarity_threshold,
)
return data
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.post("/admin/knowledge_bases/{kb_id}/graph/knowledge/chunks")
def retrieve_knowledge_chunks(
session: SessionDep, kb_id: int, request: KnowledgeChunkRequest
):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
graph_store = get_kb_tidb_graph_store(session, kb)
data = graph_store.get_chunks_by_relationships(request.relationships_ids)
if not data:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="No chunks found for the given relationships",
)
return data
except KBNotFound as e:
raise e
except HTTPException as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/knowledge_base/graph/models.py
================================================
from typing import List, Optional
from pydantic import BaseModel, model_validator
from app.rag.retrievers.knowledge_graph.schema import (
KnowledgeGraphRetrieverConfig,
)
class SynopsisEntityCreate(BaseModel):
    """Payload for creating a synopsis entity derived from existing entities."""

    name: str
    description: str
    topic: str
    meta: dict
    # IDs of the source entities the synopsis summarizes; must be non-empty.
    entities: List[int]

    @model_validator(mode="after")
    def validate_entities(self):
        # Idiomatic emptiness check instead of `len(...) == 0`.
        if not self.entities:
            raise ValueError("Entities list should not be empty")
        return self
class EntityUpdate(BaseModel):
    """Partial-update payload for a graph entity."""

    name: Optional[str] = None
    description: Optional[str] = None
    meta: Optional[dict] = None
class RelationshipUpdate(BaseModel):
    """Partial-update payload for a graph relationship."""

    description: Optional[str] = None
    meta: Optional[dict] = None
    weight: Optional[int] = None
class GraphSearchRequest(BaseModel):
    """Request body for the deprecated /graph/search endpoint."""

    query: str
    include_meta: bool = True
    depth: int = 2
    with_degree: bool = True
    # NOTE: pydantic copies mutable defaults per instance, so the shared `{}`
    # literal here is safe (unlike a plain-function mutable default).
    relationship_meta_filters: dict = {}
# Knowledge Graph Retrieval
class KBKnowledgeGraphRetrievalConfig(BaseModel):
    """Wrapper carrying the knowledge-graph retriever configuration."""

    knowledge_graph: KnowledgeGraphRetrieverConfig
class KBRetrieveKnowledgeGraphRequest(BaseModel):
    """Request body for /graph/retrieve: query plus LLM and retriever settings."""

    query: str
    llm_id: int
    retrieval_config: KBKnowledgeGraphRetrievalConfig
### Experimental
class KnowledgeRequest(BaseModel):
    """Query for graph knowledge retrieval (experimental endpoint)."""

    query: str
    similarity_threshold: float = 0.55
    top_k: int = 10
class KnowledgeNeighborRequest(BaseModel):
    """Request to expand the neighborhood of given entities (experimental)."""

    entities_ids: List[int]
    query: str
    max_depth: int = 1
    max_neighbors: int = 20
    similarity_threshold: float = 0.55
class KnowledgeChunkRequest(BaseModel):
    """Request for the chunks backing a set of relationships (experimental)."""

    relationships_ids: List[int]
================================================
FILE: backend/app/api/admin_routes/knowledge_base/graph/routes.py
================================================
import logging
from typing import List
import json
from fastapi import APIRouter, HTTPException, status
from fastapi.responses import StreamingResponse
from fastapi.encoders import jsonable_encoder
from app.api.admin_routes.knowledge_base.graph.models import (
SynopsisEntityCreate,
EntityUpdate,
RelationshipUpdate,
KBRetrieveKnowledgeGraphRequest,
GraphSearchRequest,
)
from app.api.deps import SessionDep
from app.exceptions import KBNotFound, InternalServerError
from app.models import (
EntityPublic,
RelationshipPublic,
)
from app.rag.retrievers.knowledge_graph.schema import (
KnowledgeGraphRetrievalResult,
)
from app.rag.knowledge_base.index_store import (
get_kb_tidb_graph_editor,
get_kb_tidb_graph_store,
)
from app.rag.retrievers.knowledge_graph.simple_retriever import (
KnowledgeGraphSimpleRetriever,
)
from app.repositories import knowledge_base_repo
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get(
    "/admin/knowledge_bases/{kb_id}/graph/entities/search",
    response_model=List[EntityPublic],
)
def search_similar_entities(
    session: SessionDep, kb_id: int, query: str, top_k: int = 10
):
    """Search entities in the KB graph semantically similar to `query`."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        return editor.search_similar_entities(session, query, top_k)
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.post(
    "/admin/knowledge_bases/{kb_id}/graph/entities/synopsis",
    response_model=EntityPublic,
)
def create_synopsis_entity(
    session: SessionDep, kb_id: int, request: SynopsisEntityCreate
):
    """Create a synopsis entity aggregating the entities listed in the request."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        return editor.create_synopsis_entity(
            session,
            request.name,
            request.description,
            request.topic,
            request.meta,
            request.entities,
        )
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.get(
    "/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}",
    response_model=EntityPublic,
)
def get_entity(session: SessionDep, kb_id: int, entity_id: int):
    """Fetch a single graph entity; responds 404 when it does not exist."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        found = editor.get_entity(session, entity_id)
        if not found:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Entity not found",
            )
        return found
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.put(
    "/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}",
    response_model=EntityPublic,
)
def update_entity(
    session: SessionDep, kb_id: int, entity_id: int, entity_update: EntityUpdate
):
    """Apply a partial update to a graph entity; 404 when it does not exist."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        existing = editor.get_entity(session, entity_id)
        if existing is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Entity not found",
            )
        return editor.update_entity(session, existing, entity_update.model_dump())
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.get("/admin/knowledge_bases/{kb_id}/graph/entities/{entity_id}/subgraph")
def get_entity_subgraph(session: SessionDep, kb_id: int, entity_id: int) -> dict:
try:
kb = knowledge_base_repo.must_get(session, kb_id)
tidb_graph_editor = get_kb_tidb_graph_editor(session, kb)
entity = tidb_graph_editor.get_entity(session, entity_id)
if entity is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Entity not found",
)
relationships, entities = tidb_graph_editor.get_entity_subgraph(session, entity)
return {
"relationships": relationships,
"entities": entities,
}
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get(
    "/admin/knowledge_bases/{kb_id}/graph/relationships/{relationship_id}",
    response_model=RelationshipPublic,
)
def get_relationship(session: SessionDep, kb_id: int, relationship_id: int):
    """Fetch a single graph relationship; responds 404 when it does not exist."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        found = editor.get_relationship(session, relationship_id)
        if found is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Relationship not found",
            )
        return found
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.put(
    "/admin/knowledge_bases/{kb_id}/graph/relationships/{relationship_id}",
    response_model=RelationshipPublic,
)
def update_relationship(
    session: SessionDep,
    kb_id: int,
    relationship_id: int,
    relationship_update: RelationshipUpdate,
):
    """Apply a partial update to a relationship; 404 when it does not exist."""
    try:
        knowledge_base = knowledge_base_repo.must_get(session, kb_id)
        editor = get_kb_tidb_graph_editor(session, knowledge_base)
        existing = editor.get_relationship(session, relationship_id)
        if existing is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Relationship not found",
            )
        return editor.update_relationship(
            session, existing, relationship_update.model_dump()
        )
    except KBNotFound as e:
        raise e
    except Exception as e:
        # TODO: throw InternalServerError
        raise e
@router.post("/admin/knowledge_bases/{kb_id}/graph/retrieve")
def retrieve_kb_knowledge_graph(
db_session: SessionDep, kb_id: int, request: KBRetrieveKnowledgeGraphRequest
) -> KnowledgeGraphRetrievalResult:
try:
retriever = KnowledgeGraphSimpleRetriever(
db_session=db_session,
knowledge_base_id=kb_id,
config=request.retrieval_config.knowledge_graph,
)
knowledge_graph = retriever.retrieve_knowledge_graph(request.query)
return KnowledgeGraphRetrievalResult(
entities=knowledge_graph.entities,
relationships=knowledge_graph.relationships,
)
except KBNotFound as e:
raise e
except Exception as e:
# TODO: throw InternalServerError
raise e
@router.post("/admin/knowledge_bases/{kb_id}/graph/search", deprecated=True)
def legacy_search_graph(session: SessionDep, kb_id: int, request: GraphSearchRequest):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
graph_store = get_kb_tidb_graph_store(session, kb)
entities, relationships = graph_store.retrieve_with_weight(
request.query,
[],
request.depth,
request.include_meta,
request.with_degree,
request.relationship_meta_filters,
)
return {
"entities": entities,
"relationships": relationships,
}
except KBNotFound as e:
raise e
except Exception as e:
# TODO: throw InternalServerError
raise e
@router.post("/admin/knowledge_bases/{kb_id}/graph/entire_graph")
def get_entire_knowledge_graph(session: SessionDep, kb_id: int):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
graph_store = get_kb_tidb_graph_store(session, kb)
retrieved_kg = graph_store.get_entire_knowledge_graph()
return {
"entities": retrieved_kg.entities,
"relationships": retrieved_kg.relationships,
}
except KBNotFound as e:
raise e
except Exception as e:
# TODO: throw InternalServerError
raise e
@router.get("/admin/knowledge_bases/{kb_id}/graph/entire_graph/stream")
def stream_entire_knowledge_graph(session: SessionDep, kb_id: int):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
graph_store = get_kb_tidb_graph_store(session, kb)
def generate():
for chunk in graph_store.stream_entire_knowledge_graph(chunk_size=5000):
yield f"data: {json.dumps(jsonable_encoder(chunk))}\n\n"
yield f"data: {json.dumps({'type': 'complete'})}\n\n"
return StreamingResponse(
generate(),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Access-Control-Allow-Origin": "*",
}
)
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/knowledge_base/models.py
================================================
from datetime import datetime
from typing import Optional
from uuid import UUID
from pydantic import BaseModel, field_validator, Field
from app.api.admin_routes.knowledge_base.data_source.models import (
KBDataSource,
KBDataSourceCreate,
)
from app.api.admin_routes.models import (
EmbeddingModelDescriptor,
LLMDescriptor,
UserDescriptor,
)
from app.exceptions import KBNoVectorIndexConfigured
from app.models import KgIndexStatus
from app.models.knowledge_base import IndexMethod, GeneralChunkingConfig, ChunkingConfig
class KnowledgeBaseCreate(BaseModel):
    """Payload for creating a knowledge base, optionally with initial data sources."""

    name: str
    description: Optional[str] = None
    index_methods: list[IndexMethod] = Field(
        default_factory=lambda: [IndexMethod.VECTOR]
    )
    llm_id: Optional[int] = None
    embedding_model_id: Optional[int] = None
    chunking_config: ChunkingConfig = Field(default_factory=GeneralChunkingConfig)
    data_sources: list[KBDataSourceCreate] = Field(default_factory=list)

    @field_validator("name")
    def name_must_not_be_blank(cls, v: str) -> str:
        # Reject names that are empty or whitespace-only.
        if not v.strip():
            raise ValueError("Please provide a name for the knowledge base")
        return v

    @field_validator("index_methods")
    def index_methods_must_has_vector(cls, v: list[IndexMethod]) -> list[IndexMethod]:
        # Notice: For now, a knowledge base must be configured with the vector
        # index method; we will remove this limit in the future.
        if IndexMethod.VECTOR not in v:
            raise KBNoVectorIndexConfigured()
        return v
class KnowledgeBaseUpdate(BaseModel):
    """Partial-update payload for a knowledge base's mutable settings."""

    name: Optional[str] = None
    description: Optional[str] = None
    chunking_config: Optional[ChunkingConfig] = None
class KnowledgeBaseDetail(BaseModel):
    """
    Represents a detailed view of a knowledge base, including its data
    sources, chunking/index configuration, and attached LLM / embedding model.
    """

    id: int
    name: str
    description: Optional[str] = None
    documents_total: int
    data_sources_total: int
    # Notice: By default, SQLModel will not serialize list type relationships.
    # https://github.com/fastapi/sqlmodel/issues/37#issuecomment-2093607242
    data_sources: list[KBDataSource]
    chunking_config: Optional[ChunkingConfig] = None
    index_methods: list[IndexMethod]
    llm_id: int | None = None
    llm: LLMDescriptor | None = None
    embedding_model_id: int | None = None
    embedding_model: EmbeddingModelDescriptor | None = None
    creator: UserDescriptor | None = None
    created_at: datetime | None = None
    updated_at: datetime | None = None
class KnowledgeBaseItem(BaseModel):
    """
    Represents a simplified view of a knowledge base for list display purposes.
    """

    id: int
    name: str
    description: Optional[str] = None
    documents_total: int
    data_sources_total: int
    index_methods: list[IndexMethod]
    creator: UserDescriptor | None = None
    created_at: datetime
    updated_at: datetime
class VectorIndexError(BaseModel):
    """A document whose vector-index build failed, with the recorded error."""

    document_id: int
    document_name: str
    source_uri: str
    error: str | None = None
class KGIndexError(BaseModel):
    """A chunk whose knowledge-graph index build failed, with the recorded error."""

    document_id: int
    document_name: str
    source_uri: str
    chunk_id: UUID
    error: str | None = None
class ChunkItem(BaseModel):
    """Serialized view of a stored document chunk, including embedding and KG status."""

    id: UUID
    document_id: int
    hash: str
    text: str
    meta: Optional[dict | list]
    embedding: Optional[list[float]]
    relations: Optional[dict | list]
    source_uri: Optional[str]
    index_status: Optional[KgIndexStatus]
    index_result: Optional[str]
    created_at: Optional[datetime]
    updated_at: Optional[datetime]
class RetrievalRequest(BaseModel):
    """Simple retrieval request: query text, chat engine name, and result count."""

    query: str
    chat_engine: str = "default"
    top_k: Optional[int] = 5
================================================
FILE: backend/app/api/admin_routes/knowledge_base/routes.py
================================================
import logging
from fastapi import APIRouter, Depends, HTTPException
from fastapi_pagination import Params, Page
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.rag.knowledge_base.index_store import (
init_kb_tidb_vector_store,
init_kb_tidb_graph_store,
)
from .models import (
KnowledgeBaseDetail,
KnowledgeBaseItem,
KnowledgeBaseCreate,
KnowledgeBaseUpdate,
VectorIndexError,
KGIndexError,
)
from app.exceptions import (
InternalServerError,
KBIsUsedByChatEngines,
)
from app.models import (
DataSource,
KnowledgeBase,
)
from app.repositories import (
embedding_model_repo,
llm_repo,
data_source_repo,
knowledge_base_repo,
)
from app.tasks import (
build_kg_index_for_chunk,
build_index_for_document,
)
from app.tasks.knowledge_base import (
import_documents_for_knowledge_base,
stats_for_knowledge_base,
purge_knowledge_base_related_resources,
)
from ..models import ChatEngineDescriptor
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/admin/knowledge_bases")
def create_knowledge_base(
session: SessionDep, user: CurrentSuperuserDep, create: KnowledgeBaseCreate
) -> KnowledgeBaseDetail:
try:
data_sources = [
data_source_repo.create(
session,
DataSource(
name=data_source.name,
description="",
user_id=user.id,
data_source_type=data_source.data_source_type,
config=data_source.config,
),
)
for data_source in create.data_sources
]
if not create.llm_id:
create.llm_id = llm_repo.must_get_default(session).id
if not create.embedding_model_id:
create.embedding_model_id = embedding_model_repo.must_get_default(
session
).id
knowledge_base = KnowledgeBase(
name=create.name,
description=create.description,
index_methods=create.index_methods,
llm_id=create.llm_id,
embedding_model_id=create.embedding_model_id,
chunking_config=create.chunking_config.model_dump(),
data_sources=data_sources,
created_by=user.id,
updated_by=user.id,
)
knowledge_base = knowledge_base_repo.create(session, knowledge_base)
# Ensure the knowledge-base corresponding table schema are initialized.
init_kb_tidb_vector_store(session, knowledge_base)
init_kb_tidb_graph_store(session, knowledge_base)
# Trigger import and index documents for knowledge base
import_documents_for_knowledge_base.delay(knowledge_base.id)
return knowledge_base
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/knowledge_bases")
def list_knowledge_bases(
session: SessionDep,
user: CurrentSuperuserDep,
params: Params = Depends(),
) -> Page[KnowledgeBaseItem]:
return knowledge_base_repo.paginate(session, params)
@router.get("/admin/knowledge_bases/{knowledge_base_id}")
def get_knowledge_base(
session: SessionDep,
user: CurrentSuperuserDep,
knowledge_base_id: int,
) -> KnowledgeBaseDetail:
try:
return knowledge_base_repo.must_get(session, knowledge_base_id)
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.put("/admin/knowledge_bases/{knowledge_base_id}")
def update_knowledge_base_setting(
session: SessionDep,
user: CurrentSuperuserDep,
knowledge_base_id: int,
update: KnowledgeBaseUpdate,
) -> KnowledgeBaseDetail:
try:
knowledge_base = knowledge_base_repo.must_get(session, knowledge_base_id)
knowledge_base = knowledge_base_repo.update(session, knowledge_base, update)
return knowledge_base
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/linked_chat_engines")
def list_kb_linked_chat_engines(
session: SessionDep, user: CurrentSuperuserDep, kb_id: int
) -> list[ChatEngineDescriptor]:
try:
kb = knowledge_base_repo.must_get(session, kb_id)
return knowledge_base_repo.list_linked_chat_engines(session, kb.id)
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.delete("/admin/knowledge_bases/{kb_id}")
def delete_knowledge_base(session: SessionDep, user: CurrentSuperuserDep, kb_id: int):
try:
kb = knowledge_base_repo.must_get(session, kb_id)
# Check if the knowledge base has linked chat engines.
linked_chat_engines = knowledge_base_repo.list_linked_chat_engines(
session, kb.id
)
if len(linked_chat_engines) > 0:
raise KBIsUsedByChatEngines(kb_id, len(linked_chat_engines))
# Delete knowledge base.
knowledge_base_repo.delete(session, kb)
# Trigger purge knowledge base related resources after 5 seconds.
purge_knowledge_base_related_resources.apply_async(args=[kb_id], countdown=5)
return {"detail": f"Knowledge base #{kb_id} is deleted successfully"}
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/knowledge_bases/{knowledge_base_id}/overview")
def get_knowledge_base_index_overview(
session: SessionDep,
user: CurrentSuperuserDep,
knowledge_base_id: int,
) -> dict:
try:
knowledge_base = knowledge_base_repo.must_get(session, knowledge_base_id)
stats_for_knowledge_base.delay(knowledge_base.id)
return knowledge_base_repo.get_index_overview(session, knowledge_base)
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/vector-index-errors")
def list_kb_vector_index_errors(
session: SessionDep,
user: CurrentSuperuserDep,
kb_id: int,
params: Params = Depends(),
) -> Page[VectorIndexError]:
try:
kb = knowledge_base_repo.must_get(session, kb_id)
return knowledge_base_repo.list_vector_index_built_errors(session, kb, params)
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/knowledge_bases/{kb_id}/kg-index-errors")
def list_kb_kg_index_errors(
session: SessionDep,
user: CurrentSuperuserDep,
kb_id: int,
params: Params = Depends(),
) -> Page[KGIndexError]:
try:
kb = knowledge_base_repo.must_get(session, kb_id)
return knowledge_base_repo.list_kg_index_built_errors(session, kb, params)
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.post("/admin/knowledge_bases/{kb_id}/retry-failed-index-tasks")
def retry_failed_tasks(
session: SessionDep,
user: CurrentSuperuserDep,
kb_id: int,
) -> dict:
try:
kb = knowledge_base_repo.must_get(session, kb_id)
# Retry failed vector index tasks.
document_ids = knowledge_base_repo.set_failed_documents_status_to_pending(
session, kb
)
for document_id in document_ids:
build_index_for_document.delay(kb_id, document_id)
logger.info(f"Triggered {len(document_ids)} documents to rebuilt vector index.")
# Retry failed kg index tasks.
chunk_ids = knowledge_base_repo.set_failed_chunks_status_to_pending(session, kb)
for chunk_id in chunk_ids:
build_kg_index_for_chunk.delay(kb_id, chunk_id)
logger.info(
f"Triggered {len(chunk_ids)} chunks to rebuilt knowledge graph index."
)
return {
"detail": f"Triggered reindex {len(document_ids)} documents and {len(chunk_ids)} chunks of knowledge base #{kb_id}.",
"reindex_document_ids": document_ids,
"reindex_chunk_ids": chunk_ids,
}
except HTTPException:
raise
except Exception as e:
logger.exception(e)
raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/langfuse.py
================================================
import logging
from pydantic import BaseModel
from fastapi import APIRouter
from langfuse import Langfuse
from app.api.deps import CurrentSuperuserDep
router = APIRouter()
logger = logging.getLogger(__name__)
class LangfuseSetting(BaseModel):
    """Langfuse connection settings: host plus public/secret key pair."""

    host: str = "https://us.cloud.langfuse.com"
    public_key: str
    secret_key: str
class LangfuseTestResult(BaseModel):
    """Outcome of a Langfuse connectivity test; `error` is empty on success."""

    success: bool
    error: str = ""
@router.post("/admin/langfuse/test")
def test_langfuse(
user: CurrentSuperuserDep,
request: LangfuseSetting,
) -> LangfuseTestResult:
try:
lf = Langfuse(
host=request.host,
secret_key=request.secret_key,
public_key=request.public_key,
)
success = lf.auth_check()
if not success:
error = "Langfuse authentication failed, please check public_key, secret_key and host."
else:
error = ""
except Exception as e:
success = False
error = str(e)
return LangfuseTestResult(success=success, error=error)
================================================
FILE: backend/app/api/admin_routes/legacy_retrieve.py
================================================
import logging
from typing import Optional, List
from fastapi import APIRouter
from sqlmodel import Session
from app.models import Document
from app.api.admin_routes.models import ChatEngineBasedRetrieveRequest
from app.api.deps import SessionDep, CurrentSuperuserDep
from llama_index.core.schema import NodeWithScore
from app.exceptions import InternalServerError, KBNotFound
from app.rag.chat.config import ChatEngineConfig
from app.rag.chat.retrieve.retrieve_flow import RetrieveFlow
router = APIRouter()
logger = logging.getLogger(__name__)
def get_override_engine_config(
    db_session: Session,
    engine_name: str,
    # Override chat engine config.
    top_k: Optional[int] = None,
    similarity_top_k: Optional[int] = None,
    oversampling_factor: Optional[int] = None,
    refine_question_with_kg: Optional[bool] = None,
) -> ChatEngineConfig:
    """Load a chat engine config from the DB and apply any non-None overrides."""
    config = ChatEngineConfig.load_from_db(db_session, engine_name)
    vector_search = config.vector_search
    if similarity_top_k is not None:
        vector_search.similarity_top_k = similarity_top_k
    if oversampling_factor is not None:
        vector_search.oversampling_factor = oversampling_factor
    if top_k is not None:
        vector_search.top_k = top_k
    if refine_question_with_kg is not None:
        config.refine_question_with_kg = refine_question_with_kg
    return config
@router.get("/admin/retrieve/documents", deprecated=True)
def legacy_retrieve_documents(
session: SessionDep,
user: CurrentSuperuserDep,
question: str,
chat_engine: str = "default",
# Override chat engine config.
top_k: Optional[int] = 5,
similarity_top_k: Optional[int] = None,
oversampling_factor: Optional[int] = 5,
refine_question_with_kg: Optional[bool] = True,
) -> List[Document]:
try:
engine_config = get_override_engine_config(
db_session=session,
engine_name=chat_engine,
top_k=top_k,
similarity_top_k=similarity_top_k,
oversampling_factor=oversampling_factor,
refine_question_with_kg=refine_question_with_kg,
)
retriever = RetrieveFlow(
db_session=session,
engine_name=chat_engine,
engine_config=engine_config,
)
return retriever.retrieve_documents(question)
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.get("/admin/embedding_retrieve", deprecated=True)
def legacy_retrieve_chunks(
session: SessionDep,
user: CurrentSuperuserDep,
question: str,
chat_engine: str = "default",
# Override chat engine config.
top_k: Optional[int] = 5,
similarity_top_k: Optional[int] = None,
oversampling_factor: Optional[int] = 5,
refine_question_with_kg=False,
) -> List[NodeWithScore]:
try:
engine_config = get_override_engine_config(
db_session=session,
engine_name=chat_engine,
top_k=top_k,
similarity_top_k=similarity_top_k,
oversampling_factor=oversampling_factor,
refine_question_with_kg=refine_question_with_kg,
)
retriever = RetrieveFlow(
db_session=session,
engine_name=chat_engine,
engine_config=engine_config,
)
return retriever.retrieve(question)
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
@router.post("/admin/embedding_retrieve", deprecated=True)
def legacy_retrieve_chunks_2(
session: SessionDep,
user: CurrentSuperuserDep,
request: ChatEngineBasedRetrieveRequest,
) -> List[NodeWithScore]:
try:
engine_config = get_override_engine_config(
db_session=session,
engine_name=request.chat_engine,
top_k=request.top_k,
similarity_top_k=request.similarity_top_k,
oversampling_factor=request.oversampling_factor,
refine_question_with_kg=request.refine_question_with_kg,
)
retriever = RetrieveFlow(
db_session=session,
engine_name=request.chat_engine,
engine_config=engine_config,
)
return retriever.retrieve(request.query)
except KBNotFound as e:
raise e
except Exception as e:
logger.exception(e)
raise InternalServerError()
================================================
FILE: backend/app/api/admin_routes/llm/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/llm/routes.py
================================================
from typing import List
from fastapi import APIRouter, Depends
from fastapi_pagination import Page, Params
from llama_index.core.base.llms.types import ChatMessage
from pydantic import BaseModel
from app.api.deps import CurrentSuperuserDep, SessionDep
from app.logger import logger
from app.models import AdminLLM, LLM, LLMUpdate
from app.rag.llms.provider import LLMProviderOption, llm_provider_options
from app.rag.llms.resolver import resolve_llm
from app.repositories.llm import llm_repo
router = APIRouter()
@router.get("/admin/llms/providers/options")
def list_llm_provider_options(user: CurrentSuperuserDep) -> List[LLMProviderOption]:
return llm_provider_options
@router.get("/admin/llms")
def list_llms(
db_session: SessionDep,
user: CurrentSuperuserDep,
params: Params = Depends(),
) -> Page[AdminLLM]:
return llm_repo.paginate(db_session, params)
class LLMTestResult(BaseModel):
    """Outcome of an LLM connectivity test; `error` is empty on success."""

    success: bool
    error: str = ""
@router.post("/admin/llms/test")
def test_llm(
db_llm: LLM,
user: CurrentSuperuserDep,
) -> LLMTestResult:
try:
llm = resolve_llm(
provider=db_llm.provider,
model=db_llm.model,
config=db_llm.config,
credentials=db_llm.credentials,
)
llm.chat([ChatMessage(role="user", content="Who are you?")])
# Test with dspy LM.
import dspy
from app.rag.llms.dspy import get_dspy_lm_by_llama_llm
dspy_lm = get_dspy_lm_by_llama_llm(llm)
with dspy.context(lm=dspy_lm):
math = dspy.Predict("question -> answer: float")
prediction = math(question="1 + 1 = ?")
assert prediction.answer == 2
success = True
error = ""
except Exception as e:
logger.info(f"Failed to test LLM: {e}")
success = False
error = str(e)
return LLMTestResult(success=success, error=error)
@router.post("/admin/llms")
def create_llm(
db_session: SessionDep,
user: CurrentSuperuserDep,
llm: LLM,
) -> AdminLLM:
return llm_repo.create(db_session, llm)
@router.get("/admin/llms/{llm_id}")
def get_llm(
db_session: SessionDep,
user: CurrentSuperuserDep,
llm_id: int,
) -> AdminLLM:
return llm_repo.must_get(db_session, llm_id)
@router.put("/admin/llms/{llm_id}")
def update_llm(
db_session: SessionDep,
user: CurrentSuperuserDep,
llm_id: int,
llm_update: LLMUpdate,
) -> AdminLLM:
llm = llm_repo.must_get(db_session, llm_id)
return llm_repo.update(db_session, llm, llm_update)
@router.delete("/admin/llms/{llm_id}")
def delete_llm(
db_session: SessionDep,
user: CurrentSuperuserDep,
llm_id: int,
) -> None:
llm = llm_repo.must_get(db_session, llm_id)
llm_repo.delete(db_session, llm)
@router.put("/admin/llms/{llm_id}/set_default")
def set_default_llm(
    db_session: SessionDep, user: CurrentSuperuserDep, llm_id: int
) -> AdminLLM:
    """Mark the given LLM as the system default and return it."""
    chosen = llm_repo.must_get(db_session, llm_id)
    return llm_repo.set_default(db_session, chosen)
================================================
FILE: backend/app/api/admin_routes/models.py
================================================
from uuid import UUID
from typing import Optional
from pydantic import BaseModel
from app.api.admin_routes.embedding_model.models import EmbeddingModelItem
from app.rag.llms.provider import LLMProvider
class LLMDescriptor(BaseModel):
    """Lightweight reference to an LLM record, embedded in admin API responses."""

    id: int
    name: str
    provider: LLMProvider
    model: str
    is_default: bool
class EmbeddingModelDescriptor(EmbeddingModelItem):
    """Alias of `EmbeddingModelItem` for places that expect a descriptor type."""

    pass
class UserDescriptor(BaseModel):
    """Minimal user reference (id + email) for admin API responses."""

    id: UUID
    email: str
class KnowledgeBaseDescriptor(BaseModel):
    """Reference to a knowledge base by id and name."""

    id: int
    name: str

    def __hash__(self):
        # Hash on the primary key so descriptors can be used in sets / as dict
        # keys. Pydantic's field-wise __eq__ still applies, which stays
        # consistent: descriptors equal on all fields share the same id.
        return hash(self.id)
class DataSourceDescriptor(BaseModel):
    """Reference to a data source by id and name."""

    id: int
    name: str
class ChatEngineDescriptor(BaseModel):
    """Reference to a chat engine, flagging whether it is the system default."""

    id: int
    name: str
    is_default: bool
class ChatEngineBasedRetrieveRequest(BaseModel):
    """Retrieval request whose configuration comes from a named chat engine."""

    query: str
    # Name of the chat engine whose configuration drives retrieval.
    chat_engine: Optional[str] = "default"
    top_k: Optional[int] = 5
    similarity_top_k: Optional[int] = None
    # NOTE(review): presumably a multiplier for fetching extra candidates
    # before filtering — confirm against the retriever implementation.
    oversampling_factor: Optional[int] = 5
    # Whether to refine the question using the knowledge graph first.
    refine_question_with_kg: Optional[bool] = False
================================================
FILE: backend/app/api/admin_routes/reranker_model/__init__.py
================================================
================================================
FILE: backend/app/api/admin_routes/reranker_model/routes.py
================================================
from typing import List
from fastapi import Depends, APIRouter
from fastapi_pagination import Params, Page
from pydantic import BaseModel
from llama_index.core.schema import NodeWithScore, TextNode
from app.api.admin_routes.llm.routes import LLMTestResult
from app.api.deps import CurrentSuperuserDep, SessionDep
from app.models import RerankerModel, AdminRerankerModel
from app.models.reranker_model import RerankerModelUpdate
from app.repositories.reranker_model import reranker_model_repo
from app.rag.rerankers.provider import RerankerProviderOption, reranker_provider_options
from app.rag.rerankers.resolver import resolve_reranker
from app.logger import logger
# Shared router for the admin reranker-model endpoints; mounted by the API server.
router = APIRouter()
@router.get("/admin/reranker-models/providers/options")
def list_reranker_model_provider_options(
    user: CurrentSuperuserDep,
) -> List[RerankerProviderOption]:
    """Return the static catalog of supported reranker providers (superuser only)."""
    return reranker_provider_options
@router.get("/admin/reranker-models")
def list_reranker_models(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    params: Params = Depends(),
) -> Page[AdminRerankerModel]:
    """Return a paginated listing of configured reranker models (superuser only)."""
    return reranker_model_repo.paginate(db_session, params)
class RerankerModelTestResult(BaseModel):
    """Outcome of a reranker connectivity test; `error` is empty on success."""

    success: bool
    error: str = ""
@router.post("/admin/reranker-models/test")
def test_reranker_model(
    db_reranker_model: RerankerModel, user: CurrentSuperuserDep
) -> RerankerModelTestResult:
    """Smoke-test a reranker configuration without persisting it.

    Resolves the provider/model/credentials into a reranker, reranks three
    fixed sample nodes with top_n=2, and checks that exactly two nodes come
    back. Failures are reported in the response body rather than raised, so
    the admin UI can display them.

    Note: the return annotation was corrected from `LLMTestResult`
    (copy-paste from the LLM routes) to `RerankerModelTestResult`; the two
    models have identical fields, so the response schema is unchanged.
    """
    try:
        reranker = resolve_reranker(
            provider=db_reranker_model.provider,
            model=db_reranker_model.model,
            # for testing purpose, we only rerank 2 nodes
            top_n=2,
            config=db_reranker_model.config,
            credentials=db_reranker_model.credentials,
        )
        reranked_nodes = reranker.postprocess_nodes(
            nodes=[
                NodeWithScore(
                    node=TextNode(
                        text="TiDB is a distributed SQL database.",
                    ),
                    score=0.8,
                ),
                NodeWithScore(
                    node=TextNode(
                        text="TiKV is a distributed key-value storage engine.",
                    ),
                    score=0.6,
                ),
                NodeWithScore(
                    node=TextNode(
                        text="TiFlash is a columnar storage engine.",
                    ),
                    score=0.4,
                ),
            ],
            query_str="What is TiDB?",
        )
        if len(reranked_nodes) != 2:
            # The original passed %-style args as a tuple to ValueError, so the
            # message was never formatted; use an f-string instead.
            raise ValueError(f"expected 2 nodes, but got {len(reranked_nodes)}")
        success = True
        error = ""
    except Exception as e:
        # Lazy %-formatting: the message is only built if this level is enabled.
        logger.info("Failed to test reranker model: %s", e)
        success = False
        error = str(e)
    return RerankerModelTestResult(success=success, error=error)
@router.post("/admin/reranker-models")
def create_reranker_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    reranker_model: RerankerModel,
) -> AdminRerankerModel:
    """Persist a new reranker model configuration and return the created record."""
    return reranker_model_repo.create(db_session, reranker_model)
@router.get("/admin/reranker-models/{model_id}")
def get_reranker_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    model_id: int,
) -> AdminRerankerModel:
    """Fetch one reranker model by id; `must_get` raises when it does not exist."""
    return reranker_model_repo.must_get(db_session, model_id)
@router.put("/admin/reranker-models/{model_id}")
def update_reranker_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    model_id: int,
    model_update: RerankerModelUpdate,
) -> AdminRerankerModel:
    """Apply a partial update to an existing reranker model and return it.

    `must_get` raises when no model with `model_id` exists.
    """
    existing = reranker_model_repo.must_get(db_session, model_id)
    updated = reranker_model_repo.update(db_session, existing, model_update)
    return updated
@router.delete("/admin/reranker-models/{model_id}")
def delete_reranker_model(
    db_session: SessionDep,
    user: CurrentSuperuserDep,
    model_id: int,
) -> None:
    """Delete a reranker model; `must_get` raises when it does not exist."""
    target = reranker_model_repo.must_get(db_session, model_id)
    reranker_model_repo.delete(db_session, target)
@router.put("/admin/reranker-models/{model_id}/set_default")
def set_default_reranker_model(
    db_session: SessionDep, user: CurrentSuperuserDep, model_id: int
) -> AdminRerankerModel:
    """Mark the given reranker model as the system default and return it."""
    chosen = reranker_model_repo.must_get(db_session, model_id)
    return reranker_model_repo.set_default(db_session, chosen)
================================================
FILE: backend/app/api/admin_routes/semantic_cache.py
================================================
from typing import Optional, Dict
import time
import logging
from fastapi import APIRouter, Body
from app.api.deps import SessionDep, CurrentSuperuserDep
from app.rag.chat.config import ChatEngineConfig
from app.rag.semantic_cache import SemanticCacheManager, SemanticItem
# Router and module-level logger for the admin semantic-cache endpoints.
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/admin/semantic_cache")
async def add_semantic_cache(
    session: SessionDep,
    user: CurrentSuperuserDep,
    question: str,
    answer: str,
    namespace: str = "default",
    chat_engine: str = "default",
    metadata: Optional[dict] = Body(None),
) -> Dict:
    """Insert a question/answer pair into the semantic cache.

    The dspy LM is taken from the named chat engine's configuration.
    Errors are reported in the response body (status="failed" with a
    message) instead of raised, so bulk-loading callers can continue past
    individual failures.
    """
    chat_engine_config = ChatEngineConfig.load_from_db(session, chat_engine)
    _dspy_lm = chat_engine_config.get_dspy_lm(session)
    scm = SemanticCacheManager(
        dspy_llm=_dspy_lm,
    )
    try:
        scm.add_cache(
            session,
            item=SemanticItem(question=question, answer=answer),
            namespace=namespace,
            metadata=metadata,
        )
    except Exception as e:
        # Log with traceback: the original swallowed the exception silently,
        # which made cache-load failures hard to diagnose. The response
        # contract (status/message dict) is unchanged.
        logger.exception("Failed to add semantic cache entry")
        return {
            "status": "failed",
            "message": str(e),
        }
    return {
        "status": "success",
    }
@router.get("/admin/semantic_cache")
async def search_semantic_cache(
    session: SessionDep,
    user: CurrentSuperuserDep,
    query: str,
    namespace: str = "default",
    chat_engine: str = "default",
) -> Dict:
    """Search the semantic cache for entries matching `query`.

    The dspy LM is taken from the named chat engine's configuration; both
    the LM-loading and the search phase are timed and logged at DEBUG level.
    """
    start_time = time.time()
    chat_engine_config = ChatEngineConfig.load_from_db(session, chat_engine)
    _dspy_lm = chat_engine_config.get_dspy_lm(session)
    logger.debug(
        f"[search_semantic_cache] Loading dspy_lm took {time.time() - start_time:.2f} seconds"
    )
    scm = SemanticCacheManager(
        dspy_llm=_dspy_lm,
    )
    # Reset the timer to measure the search phase separately from LM loading.
    start_time = time.time()
    response = scm.search(
        session=session,
        query=query,
        namespace=namespace,
    )
    logger.debug(
        f"[search_semantic_cache] Searching semantic cache took {time.time() - start_time:.2f} seconds"
    )
    return response
================================================
FILE: backend/app/api/admin_routes/site_setting.py
================================================
from typing import Dict
from pydantic import BaseModel
from http import HTTPStatus
from fastapi import APIRouter, HTTPException
from app.api.deps import CurrentSuperuserDep, SessionDep
from app.site_settings import SiteSetting, SettingValue, SettingType
router = APIR
gitextract_vw5kjonn/
├── .dockerignore
├── .github/
│ ├── actions/
│ │ └── decide/
│ │ ├── .gitignore
│ │ ├── action.yml
│ │ ├── index.js
│ │ └── package.json
│ └── workflows/
│ ├── backend-test.yml
│ ├── deploy.yml
│ ├── regression.yml
│ ├── release.yml
│ └── verify.yml
├── .gitignore
├── CONTRIBUTING.md
├── LICENSE.txt
├── README.md
├── backend/
│ ├── .dockerignore
│ ├── .gitignore
│ ├── .pre-commit-config.yaml
│ ├── .python-version
│ ├── Dockerfile
│ ├── Makefile
│ ├── README.md
│ ├── alembic.ini
│ ├── app/
│ │ ├── __init__.py
│ │ ├── alembic/
│ │ │ ├── env.py
│ │ │ ├── script.py.mako
│ │ │ └── versions/
│ │ │ ├── 00534dc350db_.py
│ │ │ ├── 041fbef26e3a_.py
│ │ │ ├── 04947f9684ab_public_chat_engine.py
│ │ │ ├── 04d4f05116ed_.py
│ │ │ ├── 04d81be446c3_.py
│ │ │ ├── 10f36e8a25c4_.py
│ │ │ ├── 197bc8be72d1_.py
│ │ │ ├── 211f3c5aa125_chunking_settings.py
│ │ │ ├── 27a6723b767a_.py
│ │ │ ├── 2adc0b597dcd_int_enum_type.py
│ │ │ ├── 2fc10c21bf88_.py
│ │ │ ├── 749767db5505_add_recommend_questions.py
│ │ │ ├── 8093333c0d87_.py
│ │ │ ├── 830fd9c44f39_.py
│ │ │ ├── 94b198e20946_.py
│ │ │ ├── a54f966436ce_evaluation.py
│ │ │ ├── a8c79553c9f6_.py
│ │ │ ├── ac6e4d58580d_.py
│ │ │ ├── bd17a4ebccc5_.py
│ │ │ ├── c7f016a904c1_.py
│ │ │ ├── d2ad44deab20_multiple_kb.py
│ │ │ ├── dfee070b8abd_.py
│ │ │ ├── e32f1e546eec_.py
│ │ │ └── eb0b85608c0a_.py
│ │ ├── api/
│ │ │ ├── __init__.py
│ │ │ ├── admin_routes/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chat/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── chat_engine.py
│ │ │ │ ├── document/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── embedding_model/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── evaluation/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── evaluation_dataset.py
│ │ │ │ │ ├── evaluation_task.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── tools.py
│ │ │ │ ├── feedback.py
│ │ │ │ ├── knowledge_base/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── chunk/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── data_source/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── document/
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── graph/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── knowledge/
│ │ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ │ └── routes.py
│ │ │ │ │ │ ├── models.py
│ │ │ │ │ │ └── routes.py
│ │ │ │ │ ├── models.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── langfuse.py
│ │ │ │ ├── legacy_retrieve.py
│ │ │ │ ├── llm/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── models.py
│ │ │ │ ├── reranker_model/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── routes.py
│ │ │ │ ├── semantic_cache.py
│ │ │ │ ├── site_setting.py
│ │ │ │ ├── stats.py
│ │ │ │ ├── upload.py
│ │ │ │ └── user.py
│ │ │ ├── deps.py
│ │ │ ├── main.py
│ │ │ └── routes/
│ │ │ ├── __init__.py
│ │ │ ├── api_key.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── document.py
│ │ │ ├── feedback.py
│ │ │ ├── index.py
│ │ │ ├── models.py
│ │ │ ├── retrieve/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── models.py
│ │ │ │ └── routes.py
│ │ │ └── user.py
│ │ ├── api_server.py
│ │ ├── auth/
│ │ │ ├── api_keys.py
│ │ │ ├── db.py
│ │ │ ├── schemas.py
│ │ │ └── users.py
│ │ ├── celery.py
│ │ ├── core/
│ │ │ ├── config.py
│ │ │ └── db.py
│ │ ├── evaluation/
│ │ │ ├── evals.py
│ │ │ └── evaluators/
│ │ │ ├── __init__.py
│ │ │ ├── e2e_rag_evaluator.py
│ │ │ ├── language_detector.py
│ │ │ └── toxicity.py
│ │ ├── exceptions.py
│ │ ├── experiments/
│ │ │ ├── sql_extraction.py
│ │ │ └── sql_sample_gen.py
│ │ ├── file_storage/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── local.py
│ │ ├── logger.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── api_key.py
│ │ │ ├── auth.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── chat_message.py
│ │ │ ├── chunk.py
│ │ │ ├── data_source.py
│ │ │ ├── document.py
│ │ │ ├── embed_model.py
│ │ │ ├── entity.py
│ │ │ ├── evaluation_dataset.py
│ │ │ ├── evaluation_task.py
│ │ │ ├── feedback.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── knowledge_base_scoped/
│ │ │ │ ├── __init__.py
│ │ │ │ └── table_naming.py
│ │ │ ├── llm.py
│ │ │ ├── recommend_question.py
│ │ │ ├── relationship.py
│ │ │ ├── reranker_model.py
│ │ │ ├── semantic_cache.py
│ │ │ ├── site_setting.py
│ │ │ ├── staff_action_log.py
│ │ │ └── upload.py
│ │ ├── rag/
│ │ │ ├── __init__.py
│ │ │ ├── build_index.py
│ │ │ ├── chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chat_flow.py
│ │ │ │ ├── chat_service.py
│ │ │ │ ├── config.py
│ │ │ │ ├── retrieve/
│ │ │ │ │ └── retrieve_flow.py
│ │ │ │ └── stream_protocol.py
│ │ │ ├── datasource/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── consts.py
│ │ │ │ ├── file.py
│ │ │ │ ├── web_base.py
│ │ │ │ ├── web_single_page.py
│ │ │ │ └── web_sitemap.py
│ │ │ ├── default_prompt.py
│ │ │ ├── embeddings/
│ │ │ │ ├── local/
│ │ │ │ │ └── local_embedding.py
│ │ │ │ ├── open_like/
│ │ │ │ │ └── openai_like_embedding.py
│ │ │ │ ├── provider.py
│ │ │ │ └── resolver.py
│ │ │ ├── indices/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── knowledge_graph/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── extractor.py
│ │ │ │ │ ├── graph_store/
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── helpers.py
│ │ │ │ │ │ ├── schema.py
│ │ │ │ │ │ ├── tidb_graph_editor.py
│ │ │ │ │ │ └── tidb_graph_store.py
│ │ │ │ │ └── schema.py
│ │ │ │ └── vector_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── vector_store/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tidb_vector_store.py
│ │ │ ├── knowledge_base/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── config.py
│ │ │ │ ├── index_store.py
│ │ │ │ └── schema.py
│ │ │ ├── llms/
│ │ │ │ ├── dspy.py
│ │ │ │ ├── provider.py
│ │ │ │ └── resolver.py
│ │ │ ├── node_parser/
│ │ │ │ ├── __init__.py
│ │ │ │ └── file/
│ │ │ │ └── markdown.py
│ │ │ ├── postprocessors/
│ │ │ │ ├── __init__.py
│ │ │ │ └── metadata_post_filter.py
│ │ │ ├── query_dispatcher.py
│ │ │ ├── question_gen/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── helpers.py
│ │ │ │ └── query_decomposer.py
│ │ │ ├── rerankers/
│ │ │ │ ├── baisheng/
│ │ │ │ │ └── baisheng_reranker.py
│ │ │ │ ├── local/
│ │ │ │ │ └── local_reranker.py
│ │ │ │ ├── provider.py
│ │ │ │ ├── resolver.py
│ │ │ │ └── vllm/
│ │ │ │ └── vllm_reranker.py
│ │ │ ├── retrievers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chunk/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── fusion_retriever.py
│ │ │ │ │ ├── helpers.py
│ │ │ │ │ ├── schema.py
│ │ │ │ │ └── simple_retriever.py
│ │ │ │ ├── knowledge_graph/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── fusion_retriever.py
│ │ │ │ │ ├── schema.py
│ │ │ │ │ └── simple_retriever.py
│ │ │ │ └── multiple_knowledge_base.py
│ │ │ ├── semantic_cache/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── types.py
│ │ │ └── utils.py
│ │ ├── repositories/
│ │ │ ├── __init__.py
│ │ │ ├── base_repo.py
│ │ │ ├── chat.py
│ │ │ ├── chat_engine.py
│ │ │ ├── chunk.py
│ │ │ ├── data_source.py
│ │ │ ├── document.py
│ │ │ ├── embedding_model.py
│ │ │ ├── feedback.py
│ │ │ ├── graph.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── llm.py
│ │ │ ├── reranker_model.py
│ │ │ ├── staff_action_log.py
│ │ │ └── user.py
│ │ ├── site_settings/
│ │ │ ├── __init__.py
│ │ │ ├── default.py
│ │ │ ├── default_settings.yml
│ │ │ └── types.py
│ │ ├── staff_action/
│ │ │ └── __init__.py
│ │ ├── tasks/
│ │ │ ├── __init__.py
│ │ │ ├── build_index.py
│ │ │ ├── evaluate.py
│ │ │ └── knowledge_base.py
│ │ ├── types.py
│ │ └── utils/
│ │ ├── aes.py
│ │ ├── namespace.py
│ │ ├── singleflight_cache.py
│ │ ├── tracing.py
│ │ └── uuid6.py
│ ├── bootstrap.py
│ ├── dspy_compiled_program/
│ │ └── decompose_query/
│ │ ├── demos.json
│ │ └── program.json
│ ├── dspy_program.py
│ ├── local_embedding_reranker/
│ │ ├── .dockerignore
│ │ ├── Dockerfile
│ │ ├── main.py
│ │ └── requirements.txt
│ ├── main.py
│ ├── prestart.sh
│ ├── pyproject.toml
│ ├── supervisord.conf
│ └── tests/
│ ├── __init__.py
│ ├── conftest.py
│ ├── test_dynamic_models.py
│ └── test_llms.py
├── core/
│ ├── .cursor/
│ │ └── rules/
│ │ └── code-style.mdc
│ ├── .gitignore
│ ├── .python-version
│ ├── Makefile
│ ├── README.md
│ ├── autoflow/
│ │ ├── __init__.py
│ │ ├── chunkers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── helper.py
│ │ │ └── text.py
│ │ ├── configs/
│ │ │ ├── __init__.py
│ │ │ ├── chunkers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── text.py
│ │ │ ├── db.py
│ │ │ ├── knowledge_base.py
│ │ │ ├── main.py
│ │ │ └── models/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── common.py
│ │ │ │ ├── jina_ai.py
│ │ │ │ └── openai.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── common.py
│ │ │ │ └── openai.py
│ │ │ ├── manager.py
│ │ │ ├── providers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── jinaai.py
│ │ │ │ └── openai.py
│ │ │ └── rerankers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── common.py
│ │ │ └── jina_ai.py
│ │ ├── data_types.py
│ │ ├── db.py
│ │ ├── knowledge_base/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── prompts.py
│ │ ├── knowledge_graph/
│ │ │ ├── __init__.py
│ │ │ ├── extractors/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── simple.py
│ │ │ ├── index.py
│ │ │ ├── programs/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_graph.py
│ │ │ │ ├── extract_covariates.py
│ │ │ │ └── extract_graph.py
│ │ │ ├── retrievers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── simple.py
│ │ │ │ └── weighted.py
│ │ │ └── types.py
│ │ ├── loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── helper.py
│ │ │ ├── markdown.py
│ │ │ ├── pdf.py
│ │ │ └── webpage.py
│ │ ├── main.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── embedding_models/
│ │ │ │ ├── __init__.py
│ │ │ │ └── litellm.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── dspy.py
│ │ │ │ └── litellm.py
│ │ │ ├── manager.py
│ │ │ ├── provider.py
│ │ │ └── rerank_models/
│ │ │ ├── __init__.py
│ │ │ └── litellm.py
│ │ ├── orms/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── py.typed
│ │ ├── storage/
│ │ │ ├── __init__.py
│ │ │ ├── doc_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── tidb_doc_store.py
│ │ │ │ └── types.py
│ │ │ ├── graph_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── tidb_graph_store.py
│ │ │ │ └── types.py
│ │ │ └── types.py
│ │ ├── types.py
│ │ └── utils/
│ │ ├── hash.py
│ │ ├── uuid6.py
│ │ └── vector.py
│ ├── examples/
│ │ ├── README.md
│ │ ├── quickstart/
│ │ │ ├── fixtures/
│ │ │ │ ├── analyze-slow-queries.md
│ │ │ │ └── tidb-overview.md
│ │ │ └── quickstart.ipynb
│ │ └── streamlit/
│ │ ├── README.md
│ │ ├── build-knowledge-search-with-autoflow-and-streamlit.py
│ │ └── reqs.txt
│ ├── experimental/
│ │ ├── README.md
│ │ └── kg_extraction/
│ │ └── extract_graph.ipynb
│ ├── pyproject.toml
│ └── tests/
│ ├── __init__.py
│ ├── conftest.py
│ ├── fixtures/
│ │ ├── analyze-slow-queries.md
│ │ └── tidb-overview.md
│ ├── knowledge_base/
│ │ ├── __init__.py
│ │ ├── test_kb_with_namespace.py
│ │ └── test_kb_without_namespace.py
│ ├── knowledge_graph/
│ │ ├── programs/
│ │ │ └── test_extract_graph.py
│ │ └── test_kg_extractor.py
│ ├── models/
│ │ └── test_model_manager.py
│ └── storage/
│ ├── __init__.py
│ ├── doc_store/
│ │ └── test_tidb_doc_store.py
│ └── graph_store/
│ ├── __init__.py
│ └── test_tidb_graph_store.py
├── docker-compose-cn.yml
├── docker-compose.dev.yml
├── docker-compose.yml
├── docs/
│ ├── .gitignore
│ ├── mdx-components.ts
│ ├── next-sitemap.config.js
│ ├── next.config.mjs
│ ├── package.json
│ ├── src/
│ │ ├── app/
│ │ │ ├── [[...mdxPath]]/
│ │ │ │ └── page.jsx
│ │ │ ├── _app.tsx
│ │ │ ├── _ignored/
│ │ │ │ ├── _meta.js
│ │ │ │ └── page.mdx
│ │ │ ├── _meta.ts
│ │ │ ├── globals.css
│ │ │ └── layout.jsx
│ │ └── content/
│ │ ├── README.md
│ │ ├── _meta.ts
│ │ ├── chat-engine.mdx
│ │ ├── deploy-with-docker.mdx
│ │ ├── embedding-model.mdx
│ │ ├── evaluation.mdx
│ │ ├── faq.mdx
│ │ ├── index.mdx
│ │ ├── javascript.mdx
│ │ ├── knowledge-base.mdx
│ │ ├── llm.mdx
│ │ ├── quick-start.mdx
│ │ ├── releases/
│ │ │ ├── _meta.ts
│ │ │ ├── index.mdx
│ │ │ ├── v0.1.0.md
│ │ │ ├── v0.2.0.md
│ │ │ ├── v0.3.0.md
│ │ │ └── v0.4.0.md
│ │ ├── requirements.mdx
│ │ ├── reranker-model.mdx
│ │ └── resources.mdx
│ └── tsconfig.json
├── e2e/
│ ├── .gitignore
│ ├── README.md
│ ├── deploy-test-result.sh
│ ├── docker-compose.yml
│ ├── global.setup.ts
│ ├── package.json
│ ├── playwright.config.ts
│ ├── prepare-test.sh
│ ├── res/
│ │ └── sample-evaluation-dataset.csv
│ ├── start-test.sh
│ ├── test-html/
│ │ ├── example-doc-1.html
│ │ ├── example-doc-2.html
│ │ ├── example-sitemap.xml
│ │ ├── widget-controlled.html
│ │ └── widget.html
│ ├── tests/
│ │ ├── api-keys.spec.ts
│ │ ├── api.spec.ts
│ │ ├── bootstrap.ts
│ │ ├── chat-engine.spec.ts
│ │ ├── chat.spec.ts
│ │ ├── datasource.spec.ts
│ │ ├── evaluation.spec.ts
│ │ ├── knowledge-base.spec.ts
│ │ ├── site-settings.spec.ts
│ │ └── widget.spec.ts
│ ├── utils/
│ │ ├── chat.ts
│ │ ├── forms.ts
│ │ └── login.ts
│ └── vercel.json
└── frontend/
├── .gitignore
├── .nvmrc
├── .prettierignore
├── Dockerfile
├── app/
│ ├── .eslintrc.json
│ ├── .gitignore
│ ├── .storybook/
│ │ ├── main.ts
│ │ └── preview.ts
│ ├── README.md
│ ├── components.json
│ ├── jest.config.ts
│ ├── jest.polyfills.js
│ ├── next-sitemap.config.js
│ ├── next.config.ts
│ ├── notice.md
│ ├── package.json
│ ├── postcss.config.mjs
│ ├── public/
│ │ └── chats.mock.txt
│ ├── src/
│ │ ├── api/
│ │ │ ├── .gitignore
│ │ │ ├── api-keys.ts
│ │ │ ├── auth.ts
│ │ │ ├── chat-engines.ts
│ │ │ ├── chats.ts
│ │ │ ├── commons.ts
│ │ │ ├── datasources.ts
│ │ │ ├── documents.ts
│ │ │ ├── embedding-models.ts
│ │ │ ├── evaluations.ts
│ │ │ ├── feedbacks.ts
│ │ │ ├── graph.ts
│ │ │ ├── knowledge-base.ts
│ │ │ ├── llms.ts
│ │ │ ├── providers.ts
│ │ │ ├── rag.ts
│ │ │ ├── rerankers.ts
│ │ │ ├── site-settings.ts
│ │ │ ├── stats.ts
│ │ │ ├── system.ts
│ │ │ └── users.ts
│ │ ├── app/
│ │ │ ├── (experimental)/
│ │ │ │ └── experimental-features/
│ │ │ │ └── route.ts
│ │ │ ├── (main)/
│ │ │ │ ├── (.)auth/
│ │ │ │ │ └── login/
│ │ │ │ │ ├── loading.tsx
│ │ │ │ │ ├── page.client.tsx
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── (admin)/
│ │ │ │ │ ├── chat-engines/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── new/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── embedding-models/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── evaluation/
│ │ │ │ │ │ ├── datasets/
│ │ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ │ ├── items/
│ │ │ │ │ │ │ │ │ ├── [itemId]/
│ │ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ │ └── new/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── not-found.tsx
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── page.tsx
│ │ │ │ │ │ └── tasks/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ ├── not-found.tsx
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── feedbacks/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── knowledge-bases/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ ├── (special)/
│ │ │ │ │ │ │ │ ├── data-sources/
│ │ │ │ │ │ │ │ │ └── new/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ └── documents/
│ │ │ │ │ │ │ │ └── [documentId]/
│ │ │ │ │ │ │ │ └── chunks/
│ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ ├── (tabs)/
│ │ │ │ │ │ │ │ ├── data-sources/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── index-progress/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── knowledge-graph-explorer/
│ │ │ │ │ │ │ │ │ ├── create-synopsis-entity/
│ │ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ ├── layout.tsx
│ │ │ │ │ │ │ │ ├── page.tsx
│ │ │ │ │ │ │ │ ├── settings/
│ │ │ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ │ │ └── tabs.tsx
│ │ │ │ │ │ │ ├── api.ts
│ │ │ │ │ │ │ └── context.tsx
│ │ │ │ │ │ ├── new/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── layout.tsx
│ │ │ │ │ ├── llms/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── reranker-models/
│ │ │ │ │ │ ├── [id]/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── create/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── site-settings/
│ │ │ │ │ │ ├── custom_js/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── integrations/
│ │ │ │ │ │ │ └── page.tsx
│ │ │ │ │ │ ├── layout.tsx
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ └── stats/
│ │ │ │ │ └── trending/
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── (user)/
│ │ │ │ │ ├── api-keys/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ ├── c/
│ │ │ │ │ │ └── page.tsx
│ │ │ │ │ └── layout.tsx
│ │ │ │ ├── c/
│ │ │ │ │ └── [id]/
│ │ │ │ │ └── page.tsx
│ │ │ │ ├── layout.tsx
│ │ │ │ ├── nav.tsx
│ │ │ │ └── page.tsx
│ │ │ ├── RootProviders.tsx
│ │ │ ├── api/
│ │ │ │ └── [[...fallback_placeholder]]/
│ │ │ │ └── route.ts
│ │ │ ├── auth/
│ │ │ │ └── login/
│ │ │ │ └── page.tsx
│ │ │ ├── chart-theme.css
│ │ │ ├── globals.css
│ │ │ └── layout.tsx
│ │ ├── components/
│ │ │ ├── admin-page-heading.tsx
│ │ │ ├── admin-page-layout.tsx
│ │ │ ├── api-keys/
│ │ │ │ └── CreateApiKeyForm.tsx
│ │ │ ├── auth/
│ │ │ │ └── AuthProvider.tsx
│ │ │ ├── auto-scroll/
│ │ │ │ ├── auto-scroll.stories.tsx
│ │ │ │ ├── auto-scroll.tsx
│ │ │ │ ├── context.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── manual-scroll-voter.tsx
│ │ │ │ ├── use-auto-scroll-voter.ts
│ │ │ │ └── use-request-scroll.ts
│ │ │ ├── branding.tsx
│ │ │ ├── cells/
│ │ │ │ ├── actions.tsx
│ │ │ │ ├── boolean.tsx
│ │ │ │ ├── datetime.tsx
│ │ │ │ ├── error-message.tsx
│ │ │ │ ├── link.tsx
│ │ │ │ ├── metadata.tsx
│ │ │ │ ├── mono.tsx
│ │ │ │ ├── percent.tsx
│ │ │ │ └── reference.tsx
│ │ │ ├── charts/
│ │ │ │ ├── IndexProgressChart.stories.tsx
│ │ │ │ ├── IndexProgressChart.tsx
│ │ │ │ ├── TotalCard.stories.tsx
│ │ │ │ ├── TotalCard.tsx
│ │ │ │ └── TrendsChart.tsx
│ │ │ ├── chat/
│ │ │ │ ├── ask.tsx
│ │ │ │ ├── chat-controller.test.ts
│ │ │ │ ├── chat-controller.ts
│ │ │ │ ├── chat-hooks.tsx
│ │ │ │ ├── chat-message-controller.test.ts
│ │ │ │ ├── chat-message-controller.ts
│ │ │ │ ├── chat-new-dialog.tsx
│ │ │ │ ├── chat-stream-state.ts
│ │ │ │ ├── chat-stream.state.test.ts
│ │ │ │ ├── chats-history.tsx
│ │ │ │ ├── chats-table.tsx
│ │ │ │ ├── conversation-message-groups.scss
│ │ │ │ ├── conversation-message-groups.tsx
│ │ │ │ ├── conversation.test.tsx
│ │ │ │ ├── conversation.tsx
│ │ │ │ ├── debug-info.tsx
│ │ │ │ ├── knowledge-graph-debug-info.tsx
│ │ │ │ ├── message-annotation-history-stackvm.tsx
│ │ │ │ ├── message-annotation-history.tsx
│ │ │ │ ├── message-answer.tsx
│ │ │ │ ├── message-auto-scroll.tsx
│ │ │ │ ├── message-beta-alert.tsx
│ │ │ │ ├── message-content-sources.tsx
│ │ │ │ ├── message-content.test.tsx
│ │ │ │ ├── message-content.tsx
│ │ │ │ ├── message-error.tsx
│ │ │ │ ├── message-feedback.tsx
│ │ │ │ ├── message-input.tsx
│ │ │ │ ├── message-operations.tsx
│ │ │ │ ├── message-recommend-questions.tsx
│ │ │ │ ├── message-section.tsx
│ │ │ │ ├── testutils.ts
│ │ │ │ ├── use-ask.ts
│ │ │ │ ├── use-message-feedback.ts
│ │ │ │ └── utils.ts
│ │ │ ├── chat-engine/
│ │ │ │ ├── chat-engines-table.tsx
│ │ │ │ ├── create-chat-engine-form.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── kb-list-select.tsx
│ │ │ │ └── update-chat-engine-form.tsx
│ │ │ ├── code-theme.scss
│ │ │ ├── config-viewer.tsx
│ │ │ ├── copy-button.tsx
│ │ │ ├── dangerous-action-button.tsx
│ │ │ ├── data-table-heading.tsx
│ │ │ ├── data-table-remote.tsx
│ │ │ ├── data-table.tsx
│ │ │ ├── datasource/
│ │ │ │ ├── create-datasource-form.tsx
│ │ │ │ ├── datasource-card.tsx
│ │ │ │ ├── datasource-create-option.tsx
│ │ │ │ ├── no-datasource-placeholder.tsx
│ │ │ │ └── update-datasource-form.tsx
│ │ │ ├── date-format.tsx
│ │ │ ├── date-range-picker.tsx
│ │ │ ├── diff-seconds.tsx
│ │ │ ├── document-viewer.tsx
│ │ │ ├── documents/
│ │ │ │ ├── documents-table-filters.tsx
│ │ │ │ └── documents-table.tsx
│ │ │ ├── embedding-models/
│ │ │ │ ├── CreateEmbeddingModelForm.tsx
│ │ │ │ ├── EmbeddingModelInfo.tsx
│ │ │ │ ├── EmbeddingModelsTable.tsx
│ │ │ │ ├── UpdateEmbeddingModelForm.tsx
│ │ │ │ └── hooks.tsx
│ │ │ ├── error-card.tsx
│ │ │ ├── evaluations/
│ │ │ │ ├── cells.tsx
│ │ │ │ ├── create-evaluation-dataset-form.stories.tsx
│ │ │ │ ├── create-evaluation-dataset-form.tsx
│ │ │ │ ├── create-evaluation-dataset-item-form.stories.tsx
│ │ │ │ ├── create-evaluation-dataset-item-form.tsx
│ │ │ │ ├── create-evaluation-task-form.stories.tsx
│ │ │ │ ├── create-evaluation-task-form.tsx
│ │ │ │ ├── evaluation-dataset-info.tsx
│ │ │ │ ├── evaluation-dataset-items-table.tsx
│ │ │ │ ├── evaluation-datasets-table.tsx
│ │ │ │ ├── evaluation-task-info.stories.tsx
│ │ │ │ ├── evaluation-task-info.tsx
│ │ │ │ ├── evaluation-task-items-table.tsx
│ │ │ │ ├── evaluation-tasks-table.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── keyword-filter-toolbar.tsx
│ │ │ │ └── update-evaluation-dataset-item-form.tsx
│ │ │ ├── feedbacks/
│ │ │ │ └── feedbacks-table.tsx
│ │ │ ├── form/
│ │ │ │ ├── biz.tsx
│ │ │ │ ├── control-widget.tsx
│ │ │ │ ├── create-entity-form.tsx
│ │ │ │ ├── field-layout.tsx
│ │ │ │ ├── root-error.tsx
│ │ │ │ ├── utils.ts
│ │ │ │ └── widgets/
│ │ │ │ ├── CodeInput.tsx
│ │ │ │ ├── FileInput.tsx
│ │ │ │ ├── FilesInput.tsx
│ │ │ │ └── PromptInput.tsx
│ │ │ ├── form-sections.tsx
│ │ │ ├── graph/
│ │ │ │ ├── GraphCreateEntity.tsx
│ │ │ │ ├── GraphEditor.tsx
│ │ │ │ ├── action.ts
│ │ │ │ ├── components/
│ │ │ │ │ ├── EditingButton.tsx
│ │ │ │ │ ├── EntitiesTable.tsx
│ │ │ │ │ ├── InputField.tsx
│ │ │ │ │ ├── JsonEditor.tsx
│ │ │ │ │ ├── JsonField.tsx
│ │ │ │ │ ├── LinkDetails.tsx
│ │ │ │ │ ├── NetworkCanvas.tsx
│ │ │ │ │ ├── NetworkContext.ts
│ │ │ │ │ ├── NetworkViewer.tsx
│ │ │ │ │ ├── NodeDetails.tsx
│ │ │ │ │ ├── SearchEntity.tsx
│ │ │ │ │ ├── SearchEntityById.tsx
│ │ │ │ │ └── TextareaField.tsx
│ │ │ │ ├── index.ts
│ │ │ │ ├── network/
│ │ │ │ │ ├── CanvasNetworkRenderer.ts
│ │ │ │ │ ├── Network.ts
│ │ │ │ │ └── NetworkRendererOptions.ts
│ │ │ │ ├── remote.ts
│ │ │ │ ├── selectEntities.ts
│ │ │ │ ├── useDirtyEntity.ts
│ │ │ │ ├── useDirtyRelationship.ts
│ │ │ │ ├── useNetwork.ts
│ │ │ │ └── utils.ts
│ │ │ ├── gtag-provider.tsx
│ │ │ ├── html-viewer.tsx
│ │ │ ├── icons/
│ │ │ │ └── index.ts
│ │ │ ├── knowledge-base/
│ │ │ │ ├── create-knowledge-base-form.stories.tsx
│ │ │ │ ├── create-knowledge-base-form.tsx
│ │ │ │ ├── document-chunks-table.tsx
│ │ │ │ ├── empty-state.tsx
│ │ │ │ ├── form-index-methods.tsx
│ │ │ │ ├── hooks.ts
│ │ │ │ ├── knowledge-base-card.stories.tsx
│ │ │ │ ├── knowledge-base-card.tsx
│ │ │ │ ├── knowledge-base-chunking-config-fields.tsx
│ │ │ │ ├── knowledge-base-index.tsx
│ │ │ │ └── knowledge-base-settings-form.tsx
│ │ │ ├── llm/
│ │ │ │ ├── CreateLLMForm.tsx
│ │ │ │ ├── LLMsTable.tsx
│ │ │ │ ├── LlmInfo.tsx
│ │ │ │ ├── UpdateLLMForm.tsx
│ │ │ │ └── hooks.ts
│ │ │ ├── loader.tsx
│ │ │ ├── managed-dialog-close.tsx
│ │ │ ├── managed-dialog.tsx
│ │ │ ├── managed-panel.tsx
│ │ │ ├── model-component-info.tsx
│ │ │ ├── nextjs/
│ │ │ │ └── NextLink.tsx
│ │ │ ├── option-detail.tsx
│ │ │ ├── portal-provider.tsx
│ │ │ ├── provider-description.tsx
│ │ │ ├── py-viewer.tsx
│ │ │ ├── remark-content/
│ │ │ │ ├── components.tsx
│ │ │ │ ├── context.tsx
│ │ │ │ ├── highlight.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── remark-content.stories.tsx
│ │ │ │ ├── remark-content.tsx
│ │ │ │ └── style.scss
│ │ │ ├── reranker/
│ │ │ │ ├── CreateRerankerForm.tsx
│ │ │ │ ├── RerankerInfo.tsx
│ │ │ │ ├── RerankerModelsTable.tsx
│ │ │ │ ├── UpdateRerankerForm.tsx
│ │ │ │ └── hooks.ts
│ │ │ ├── resource-not-found.tsx
│ │ │ ├── row-checkbox.tsx
│ │ │ ├── secondary-navigator-list.tsx
│ │ │ ├── security-setting-provider.tsx
│ │ │ ├── settings/
│ │ │ │ ├── CustomJsSettings.tsx
│ │ │ │ ├── IntegrationsSettings.tsx
│ │ │ │ ├── LinkArrayField.tsx
│ │ │ │ ├── SettingsField.tsx
│ │ │ │ ├── StringArrayField.tsx
│ │ │ │ ├── WebsiteSettings.tsx
│ │ │ │ └── WidgetSnippet.tsx
│ │ │ ├── settings-form/
│ │ │ │ ├── GeneralSettingsField.tsx
│ │ │ │ ├── GeneralSettingsForm.tsx
│ │ │ │ ├── accessor-helper.ts
│ │ │ │ ├── context.tsx
│ │ │ │ ├── index.ts
│ │ │ │ └── utils.ts
│ │ │ ├── signin.tsx
│ │ │ ├── site-header-actions.tsx
│ │ │ ├── site-header.tsx
│ │ │ ├── site-nav.tsx
│ │ │ ├── system/
│ │ │ │ ├── BootstrapStatusProvider.tsx
│ │ │ │ └── SystemWizardBanner.tsx
│ │ │ ├── theme-toggle.tsx
│ │ │ ├── theme.stories.tsx
│ │ │ ├── themed-style.ts
│ │ │ ├── ui/
│ │ │ │ ├── accordion.tsx
│ │ │ │ ├── alert-dialog.tsx
│ │ │ │ ├── alert.tsx
│ │ │ │ ├── aspect-ratio.tsx
│ │ │ │ ├── avatar.tsx
│ │ │ │ ├── badge.tsx
│ │ │ │ ├── breadcrumb.tsx
│ │ │ │ ├── button.tsx
│ │ │ │ ├── calendar.tsx
│ │ │ │ ├── card.tsx
│ │ │ │ ├── carousel.tsx
│ │ │ │ ├── chart.tsx
│ │ │ │ ├── checkbox.tsx
│ │ │ │ ├── collapsible.tsx
│ │ │ │ ├── command.tsx
│ │ │ │ ├── context-menu.tsx
│ │ │ │ ├── dialog.tsx
│ │ │ │ ├── dot-pattern.tsx
│ │ │ │ ├── drawer.tsx
│ │ │ │ ├── dropdown-menu.tsx
│ │ │ │ ├── form.beta.tsx
│ │ │ │ ├── form.tsx
│ │ │ │ ├── hover-card.tsx
│ │ │ │ ├── input-otp.tsx
│ │ │ │ ├── input.tsx
│ │ │ │ ├── label.tsx
│ │ │ │ ├── menubar.tsx
│ │ │ │ ├── navigation-menu.tsx
│ │ │ │ ├── pagination.tsx
│ │ │ │ ├── popover.tsx
│ │ │ │ ├── progress.tsx
│ │ │ │ ├── radio-group.tsx
│ │ │ │ ├── resizable.tsx
│ │ │ │ ├── scroll-area.tsx
│ │ │ │ ├── select.tsx
│ │ │ │ ├── separator.tsx
│ │ │ │ ├── sheet.tsx
│ │ │ │ ├── sidebar.tsx
│ │ │ │ ├── skeleton.tsx
│ │ │ │ ├── slider.tsx
│ │ │ │ ├── sonner.tsx
│ │ │ │ ├── switch.tsx
│ │ │ │ ├── table.tsx
│ │ │ │ ├── tabs.tsx
│ │ │ │ ├── textarea.tsx
│ │ │ │ ├── toast.tsx
│ │ │ │ ├── toaster.tsx
│ │ │ │ ├── toggle-group.tsx
│ │ │ │ ├── toggle.tsx
│ │ │ │ ├── tooltip.tsx
│ │ │ │ └── use-toast.ts
│ │ │ ├── use-active-theme.ts
│ │ │ ├── use-data-table.ts
│ │ │ ├── use-href.ts
│ │ │ ├── use-latest-ref.tsx
│ │ │ ├── use-search-param.ts
│ │ │ ├── use-size.ts
│ │ │ └── website-setting-provider.tsx
│ │ ├── core/
│ │ │ └── schema/
│ │ │ ├── NOTICE.md
│ │ │ └── settings/
│ │ │ └── security.ts
│ │ ├── experimental/
│ │ │ ├── chat-verify-service/
│ │ │ │ ├── api.mock.ts
│ │ │ │ ├── api.react-server.ts
│ │ │ │ ├── api.tidbai-widget.ts
│ │ │ │ ├── api.ts
│ │ │ │ ├── message-verify-result-markdown.tsx
│ │ │ │ ├── message-verify.stories.tsx
│ │ │ │ └── message-verify.tsx
│ │ │ ├── experimental-features-provider.tsx
│ │ │ └── experimental-features.ts
│ │ ├── hooks/
│ │ │ ├── use-mobile.tsx
│ │ │ └── use-model-provider.ts
│ │ └── lib/
│ │ ├── auth.ts
│ │ ├── buffered-readable-stream.test.ts
│ │ ├── buffered-readable-stream.ts
│ │ ├── errors.ts
│ │ ├── react.ts
│ │ ├── request/
│ │ │ ├── authenticationHeaders.mock.ts
│ │ │ ├── authenticationHeaders.react-server.ts
│ │ │ ├── authenticationHeaders.tidbai-widget.ts
│ │ │ ├── authenticationHeaders.ts
│ │ │ ├── base-url.mock.ts
│ │ │ ├── base-url.react-server.ts
│ │ │ ├── base-url.tidbai-widget.ts
│ │ │ ├── base-url.ts
│ │ │ ├── errors.ts
│ │ │ ├── index.ts
│ │ │ ├── list-all-helper.ts
│ │ │ ├── params.ts
│ │ │ ├── response-handlers.ts
│ │ │ └── url.ts
│ │ ├── stackvm/
│ │ │ ├── core/
│ │ │ │ ├── index.ts
│ │ │ │ ├── instructions/
│ │ │ │ │ ├── index.ts
│ │ │ │ │ ├── instructions.ts
│ │ │ │ │ └── registry.ts
│ │ │ │ ├── model.ts
│ │ │ │ ├── types.ts
│ │ │ │ └── visit.ts
│ │ │ └── index.ts
│ │ ├── strings.ts
│ │ ├── tanstack-form.ts
│ │ ├── typing-utils.ts
│ │ ├── ui-error.tsx
│ │ ├── utils.ts
│ │ ├── zod.test.ts
│ │ └── zod.ts
│ ├── tailwind.config.ts
│ └── tsconfig.json
├── package.json
├── packages/
│ └── widget-react/
│ ├── .eslintrc.cjs
│ ├── .gitignore
│ ├── README.md
│ ├── USAGE.md
│ ├── index.html
│ ├── package.json
│ ├── postcss.config.js
│ ├── src/
│ │ ├── Widget.css
│ │ ├── Widget.tsx
│ │ ├── index.css
│ │ ├── library.tsx
│ │ ├── load-config.ts
│ │ ├── overrides/
│ │ │ ├── README.md
│ │ │ └── components/
│ │ │ ├── code-theme.scss
│ │ │ └── remark-content/
│ │ │ └── style.scss
│ │ ├── prepare-gtag.ts
│ │ └── vite-env.d.ts
│ ├── tailwind.config.ts
│ ├── tsconfig.app.json
│ ├── tsconfig.json
│ ├── tsconfig.node.json
│ └── vite.config.ts
├── patches/
│ ├── @jest__environment@29.7.0.patch
│ └── jest-runtime@29.7.0.patch
└── pnpm-workspace.yaml
Showing preview only (234K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (2442 symbols across 602 files)
FILE: .github/actions/decide/index.js
constant FRONTEND_PREFIX (line 4) | const FRONTEND_PREFIX = core.getInput('pr-e2e-frontend-label-prefix', { ...
constant BACKEND_PREFIX (line 5) | const BACKEND_PREFIX = core.getInput('pr-e2e-backend-label-prefix', { re...
function run (line 7) | function run() {
function findPRLabel (line 40) | function findPRLabel(test) {
FILE: backend/app/alembic/env.py
function get_url (line 33) | def get_url():
function include_name (line 37) | def include_name(name, type_, parent_names):
function run_migrations_offline (line 48) | def run_migrations_offline():
function run_migrations_online (line 73) | def run_migrations_online():
FILE: backend/app/alembic/versions/00534dc350db_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 51) | def downgrade():
FILE: backend/app/alembic/versions/041fbef26e3a_.py
function upgrade (line 21) | def upgrade():
function downgrade (line 40) | def downgrade():
FILE: backend/app/alembic/versions/04947f9684ab_public_chat_engine.py
function upgrade (line 19) | def upgrade():
function downgrade (line 25) | def downgrade():
FILE: backend/app/alembic/versions/04d4f05116ed_.py
function upgrade (line 22) | def upgrade():
function downgrade (line 87) | def downgrade():
FILE: backend/app/alembic/versions/04d81be446c3_.py
function upgrade (line 19) | def upgrade():
function downgrade (line 32) | def downgrade():
FILE: backend/app/alembic/versions/10f36e8a25c4_.py
function upgrade (line 19) | def upgrade():
function downgrade (line 30) | def downgrade():
FILE: backend/app/alembic/versions/197bc8be72d1_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 32) | def downgrade():
FILE: backend/app/alembic/versions/211f3c5aa125_chunking_settings.py
function upgrade (line 20) | def upgrade():
function downgrade (line 28) | def downgrade():
FILE: backend/app/alembic/versions/27a6723b767a_.py
function upgrade (line 19) | def upgrade():
function downgrade (line 34) | def downgrade():
FILE: backend/app/alembic/versions/2adc0b597dcd_int_enum_type.py
function upgrade (line 22) | def upgrade():
function downgrade (line 34) | def downgrade():
FILE: backend/app/alembic/versions/2fc10c21bf88_.py
function upgrade (line 23) | def upgrade():
function downgrade (line 442) | def downgrade():
FILE: backend/app/alembic/versions/749767db5505_add_recommend_questions.py
function upgrade (line 20) | def upgrade():
function downgrade (line 54) | def downgrade():
FILE: backend/app/alembic/versions/8093333c0d87_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 30) | def downgrade():
FILE: backend/app/alembic/versions/830fd9c44f39_.py
function upgrade (line 21) | def upgrade():
function downgrade (line 46) | def downgrade():
FILE: backend/app/alembic/versions/94b198e20946_.py
function upgrade (line 21) | def upgrade():
function downgrade (line 88) | def downgrade():
FILE: backend/app/alembic/versions/a54f966436ce_evaluation.py
function upgrade (line 20) | def upgrade():
function downgrade (line 132) | def downgrade():
FILE: backend/app/alembic/versions/a8c79553c9f6_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 27) | def downgrade():
FILE: backend/app/alembic/versions/ac6e4d58580d_.py
function upgrade (line 21) | def upgrade():
function downgrade (line 38) | def downgrade():
FILE: backend/app/alembic/versions/bd17a4ebccc5_.py
function upgrade (line 21) | def upgrade():
function downgrade (line 36) | def downgrade():
FILE: backend/app/alembic/versions/c7f016a904c1_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 26) | def downgrade():
FILE: backend/app/alembic/versions/d2ad44deab20_multiple_kb.py
function upgrade (line 27) | def upgrade():
function downgrade (line 119) | def downgrade():
FILE: backend/app/alembic/versions/dfee070b8abd_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 43) | def downgrade():
FILE: backend/app/alembic/versions/e32f1e546eec_.py
function upgrade (line 22) | def upgrade():
function downgrade (line 61) | def downgrade():
FILE: backend/app/alembic/versions/eb0b85608c0a_.py
function upgrade (line 20) | def upgrade():
function downgrade (line 39) | def downgrade():
FILE: backend/app/api/admin_routes/chat/routes.py
function list_chat_origins (line 17) | def list_chat_origins(
FILE: backend/app/api/admin_routes/chat_engine.py
function list_chat_engines (line 14) | def list_chat_engines(
function create_chat_engine (line 23) | def create_chat_engine(
function get_chat_engine (line 32) | def get_chat_engine(
function update_chat_engine (line 41) | def update_chat_engine(
function delete_chat_engine (line 52) | def delete_chat_engine(
function get_default_config (line 64) | def get_default_config(
FILE: backend/app/api/admin_routes/document/routes.py
function list_documents (line 17) | def list_documents(
FILE: backend/app/api/admin_routes/embedding_model/models.py
class EmbeddingModelCreate (line 9) | class EmbeddingModelCreate(BaseModel):
method vector_dimension_must_gt_1 (line 19) | def vector_dimension_must_gt_1(cls, v: int) -> int:
class EmbeddingModelUpdate (line 27) | class EmbeddingModelUpdate(BaseModel):
class EmbeddingModelItem (line 33) | class EmbeddingModelItem(BaseModel):
class EmbeddingModelDetail (line 42) | class EmbeddingModelDetail(BaseModel):
class EmbeddingModelTestResult (line 54) | class EmbeddingModelTestResult(BaseModel):
FILE: backend/app/api/admin_routes/embedding_model/routes.py
function list_embedding_model_provider_options (line 26) | def list_embedding_model_provider_options(
function list_embedding_models (line 33) | def list_embedding_models(
function test_embedding_model (line 40) | def test_embedding_model(
function create_embedding_model (line 67) | def create_embedding_model(
function get_embedding_model_detail (line 76) | def get_embedding_model_detail(
function update_embedding_model (line 83) | def update_embedding_model(
function delete_embedding_model (line 94) | def delete_embedding_model(
function set_default_embedding_model (line 102) | def set_default_embedding_model(
FILE: backend/app/api/admin_routes/evaluation/evaluation_dataset.py
function create_evaluation_dataset (line 23) | def create_evaluation_dataset(
function delete_evaluation_dataset (line 95) | def delete_evaluation_dataset(
function update_evaluation_dataset (line 107) | def update_evaluation_dataset(
function list_evaluation_dataset (line 125) | def list_evaluation_dataset(
function create_evaluation_dataset_item (line 139) | def create_evaluation_dataset_item(
function delete_evaluation_dataset_item (line 160) | def delete_evaluation_dataset_item(
function update_evaluation_dataset_item (line 174) | def update_evaluation_dataset_item(
function list_evaluation_dataset_item (line 201) | def list_evaluation_dataset_item(
function get_evaluation_dataset_item (line 219) | def get_evaluation_dataset_item(
FILE: backend/app/api/admin_routes/evaluation/evaluation_task.py
function create_evaluation_task (line 34) | def create_evaluation_task(
function cancel_evaluation_task (line 93) | def cancel_evaluation_task(
function get_evaluation_task (line 109) | def get_evaluation_task(
function get_evaluation_task_summary (line 118) | def get_evaluation_task_summary(
function list_evaluation_task (line 126) | def list_evaluation_task(
function list_evaluation_task_items (line 150) | def list_evaluation_task_items(
function get_summary_for_evaluation_task (line 172) | def get_summary_for_evaluation_task(
FILE: backend/app/api/admin_routes/evaluation/models.py
class CreateEvaluationTask (line 9) | class CreateEvaluationTask(BaseModel):
class EvaluationTaskOverview (line 16) | class EvaluationTaskOverview(BaseModel):
class EvaluationTaskSummary (line 32) | class EvaluationTaskSummary(BaseModel):
class UpdateEvaluationDataset (line 43) | class UpdateEvaluationDataset(BaseModel):
class CreateEvaluationDataset (line 47) | class CreateEvaluationDataset(BaseModel):
class ModifyEvaluationDatasetItem (line 52) | class ModifyEvaluationDatasetItem(BaseModel):
class ParamsWithKeyword (line 60) | class ParamsWithKeyword(Params):
FILE: backend/app/api/admin_routes/evaluation/tools.py
function must_get (line 8) | def must_get(session: Session, model: Type[T], item_id: int) -> T:
function must_get_and_belong (line 18) | def must_get_and_belong(
FILE: backend/app/api/admin_routes/feedback.py
function list_feedbacks (line 18) | def list_feedbacks(
function list_feedback_origins (line 32) | def list_feedback_origins(
FILE: backend/app/api/admin_routes/knowledge_base/chunk/models.py
class KBChunkRetrievalConfig (line 6) | class KBChunkRetrievalConfig(BaseModel):
class KBRetrieveChunksRequest (line 11) | class KBRetrieveChunksRequest(BaseModel):
FILE: backend/app/api/admin_routes/knowledge_base/chunk/routes.py
function retrieve_chunks (line 18) | def retrieve_chunks(
FILE: backend/app/api/admin_routes/knowledge_base/data_source/models.py
class KBDataSource (line 6) | class KBDataSource(BaseModel):
class KBDataSourceMutable (line 17) | class KBDataSourceMutable(BaseModel):
method name_must_not_be_blank (line 21) | def name_must_not_be_blank(cls, v: str) -> str:
class KBDataSourceCreate (line 27) | class KBDataSourceCreate(KBDataSourceMutable):
class KBDataSourceUpdate (line 32) | class KBDataSourceUpdate(KBDataSourceMutable):
FILE: backend/app/api/admin_routes/knowledge_base/data_source/routes.py
function create_kb_datasource (line 26) | def create_kb_datasource(
function update_kb_datasource (line 57) | def update_kb_datasource(
function get_kb_datasource (line 85) | def get_kb_datasource(
function list_kb_datasources (line 104) | def list_kb_datasources(
function remove_kb_datasource (line 114) | def remove_kb_datasource(
FILE: backend/app/api/admin_routes/knowledge_base/document/models.py
class DocumentFilters (line 12) | class DocumentFilters(BaseModel):
class DocumentItem (line 47) | class DocumentItem(BaseModel):
class RebuildIndexResult (line 64) | class RebuildIndexResult(BaseModel):
FILE: backend/app/api/admin_routes/knowledge_base/document/routes.py
function list_kb_documents (line 33) | def list_kb_documents(
function get_kb_document_by_id (line 56) | def get_kb_document_by_id(
function list_kb_document_chunks (line 74) | def list_kb_document_chunks(
function remove_kb_document (line 92) | def remove_kb_document(
function rebuild_kb_documents_index (line 135) | def rebuild_kb_documents_index(
function rebuild_kb_document_index (line 154) | def rebuild_kb_document_index(
function rebuild_kb_document_index_by_ids (line 173) | def rebuild_kb_document_index_by_ids(
FILE: backend/app/api/admin_routes/knowledge_base/graph/knowledge/routes.py
function retrieve_knowledge (line 20) | def retrieve_knowledge(session: SessionDep, kb_id: int, request: Knowled...
function retrieve_knowledge_neighbors (line 41) | def retrieve_knowledge_neighbors(
function retrieve_knowledge_chunks (line 63) | def retrieve_knowledge_chunks(
FILE: backend/app/api/admin_routes/knowledge_base/graph/models.py
class SynopsisEntityCreate (line 9) | class SynopsisEntityCreate(BaseModel):
method validate_entities (line 17) | def validate_entities(self):
class EntityUpdate (line 23) | class EntityUpdate(BaseModel):
class RelationshipUpdate (line 29) | class RelationshipUpdate(BaseModel):
class GraphSearchRequest (line 35) | class GraphSearchRequest(BaseModel):
class KBKnowledgeGraphRetrievalConfig (line 46) | class KBKnowledgeGraphRetrievalConfig(BaseModel):
class KBRetrieveKnowledgeGraphRequest (line 50) | class KBRetrieveKnowledgeGraphRequest(BaseModel):
class KnowledgeRequest (line 59) | class KnowledgeRequest(BaseModel):
class KnowledgeNeighborRequest (line 65) | class KnowledgeNeighborRequest(BaseModel):
class KnowledgeChunkRequest (line 73) | class KnowledgeChunkRequest(BaseModel):
FILE: backend/app/api/admin_routes/knowledge_base/graph/routes.py
function search_similar_entities (line 42) | def search_similar_entities(
function create_synopsis_entity (line 60) | def create_synopsis_entity(
function get_entity (line 85) | def get_entity(session: SessionDep, kb_id: int, entity_id: int):
function update_entity (line 107) | def update_entity(
function get_entity_subgraph (line 131) | def get_entity_subgraph(session: SessionDep, kb_id: int, entity_id: int)...
function get_relationship (line 157) | def get_relationship(session: SessionDep, kb_id: int, relationship_id: i...
function update_relationship (line 179) | def update_relationship(
function retrieve_kb_knowledge_graph (line 206) | def retrieve_kb_knowledge_graph(
function legacy_search_graph (line 228) | def legacy_search_graph(session: SessionDep, kb_id: int, request: GraphS...
function get_entire_knowledge_graph (line 251) | def get_entire_knowledge_graph(session: SessionDep, kb_id: int):
function stream_entire_knowledge_graph (line 268) | def stream_entire_knowledge_graph(session: SessionDep, kb_id: int):
FILE: backend/app/api/admin_routes/knowledge_base/models.py
class KnowledgeBaseCreate (line 20) | class KnowledgeBaseCreate(BaseModel):
method name_must_not_be_blank (line 32) | def name_must_not_be_blank(cls, v: str) -> str:
method index_methods_must_has_vector (line 38) | def index_methods_must_has_vector(cls, v: list[IndexMethod]) -> list[I...
class KnowledgeBaseUpdate (line 46) | class KnowledgeBaseUpdate(BaseModel):
class KnowledgeBaseDetail (line 52) | class KnowledgeBaseDetail(BaseModel):
class KnowledgeBaseItem (line 76) | class KnowledgeBaseItem(BaseModel):
class VectorIndexError (line 92) | class VectorIndexError(BaseModel):
class KGIndexError (line 99) | class KGIndexError(BaseModel):
class ChunkItem (line 107) | class ChunkItem(BaseModel):
class RetrievalRequest (line 122) | class RetrievalRequest(BaseModel):
FILE: backend/app/api/admin_routes/knowledge_base/routes.py
function create_knowledge_base (line 48) | def create_knowledge_base(
function list_knowledge_bases (line 103) | def list_knowledge_bases(
function get_knowledge_base (line 112) | def get_knowledge_base(
function update_knowledge_base_setting (line 127) | def update_knowledge_base_setting(
function list_kb_linked_chat_engines (line 145) | def list_kb_linked_chat_engines(
function delete_knowledge_base (line 159) | def delete_knowledge_base(session: SessionDep, user: CurrentSuperuserDep...
function get_knowledge_base_index_overview (line 185) | def get_knowledge_base_index_overview(
function list_kb_vector_index_errors (line 204) | def list_kb_vector_index_errors(
function list_kb_kg_index_errors (line 221) | def list_kb_kg_index_errors(
function retry_failed_tasks (line 238) | def retry_failed_tasks(
FILE: backend/app/api/admin_routes/langfuse.py
class LangfuseSetting (line 13) | class LangfuseSetting(BaseModel):
class LangfuseTestResult (line 19) | class LangfuseTestResult(BaseModel):
function test_langfuse (line 25) | def test_langfuse(
FILE: backend/app/api/admin_routes/legacy_retrieve.py
function get_override_engine_config (line 19) | def get_override_engine_config(
function legacy_retrieve_documents (line 41) | def legacy_retrieve_documents(
function legacy_retrieve_chunks (line 75) | def legacy_retrieve_chunks(
function legacy_retrieve_chunks_2 (line 109) | def legacy_retrieve_chunks_2(
FILE: backend/app/api/admin_routes/llm/routes.py
function list_llm_provider_options (line 20) | def list_llm_provider_options(user: CurrentSuperuserDep) -> List[LLMProv...
function list_llms (line 25) | def list_llms(
class LLMTestResult (line 33) | class LLMTestResult(BaseModel):
function test_llm (line 39) | def test_llm(
function create_llm (line 72) | def create_llm(
function get_llm (line 81) | def get_llm(
function update_llm (line 90) | def update_llm(
function delete_llm (line 101) | def delete_llm(
function set_default_llm (line 111) | def set_default_llm(
FILE: backend/app/api/admin_routes/models.py
class LLMDescriptor (line 9) | class LLMDescriptor(BaseModel):
class EmbeddingModelDescriptor (line 17) | class EmbeddingModelDescriptor(EmbeddingModelItem):
class UserDescriptor (line 21) | class UserDescriptor(BaseModel):
class KnowledgeBaseDescriptor (line 26) | class KnowledgeBaseDescriptor(BaseModel):
method __hash__ (line 30) | def __hash__(self):
class DataSourceDescriptor (line 34) | class DataSourceDescriptor(BaseModel):
class ChatEngineDescriptor (line 39) | class ChatEngineDescriptor(BaseModel):
class ChatEngineBasedRetrieveRequest (line 45) | class ChatEngineBasedRetrieveRequest(BaseModel):
FILE: backend/app/api/admin_routes/reranker_model/routes.py
function list_reranker_model_provider_options (line 23) | def list_reranker_model_provider_options(
function list_reranker_models (line 30) | def list_reranker_models(
class RerankerModelTestResult (line 38) | class RerankerModelTestResult(BaseModel):
function test_reranker_model (line 44) | def test_reranker_model(
function create_reranker_model (line 91) | def create_reranker_model(
function get_reranker_model (line 100) | def get_reranker_model(
function update_reranker_model (line 109) | def update_reranker_model(
function delete_reranker_model (line 120) | def delete_reranker_model(
function set_default_reranker_model (line 130) | def set_default_reranker_model(
FILE: backend/app/api/admin_routes/semantic_cache.py
function add_semantic_cache (line 16) | async def add_semantic_cache(
function search_semantic_cache (line 51) | async def search_semantic_cache(
FILE: backend/app/api/admin_routes/site_setting.py
function site_settings (line 13) | def site_settings(user: CurrentSuperuserDep):
class SettingUpdate (line 17) | class SettingUpdate(BaseModel):
function update_site_setting (line 51) | def update_site_setting(
FILE: backend/app/api/admin_routes/stats.py
class DateRangeStats (line 11) | class DateRangeStats(BaseModel):
class ChatStats (line 16) | class ChatStats(DateRangeStats):
function chat_count_trend (line 21) | def chat_count_trend(
function chat_origin_trend (line 29) | def chat_origin_trend(
FILE: backend/app/api/admin_routes/upload.py
function upload_files (line 28) | def upload_files(
FILE: backend/app/api/admin_routes/user.py
function search_users (line 18) | def search_users(
FILE: backend/app/api/routes/api_key.py
class CreateApiKeyRequest (line 12) | class CreateApiKeyRequest(BaseModel):
class CreateApiKeyResponse (line 16) | class CreateApiKeyResponse(BaseModel):
function create_api_key (line 21) | async def create_api_key(
function list_api_keys (line 31) | async def list_api_keys(
function delete_api_key (line 40) | async def delete_api_key(
FILE: backend/app/api/routes/chat.py
class ChatRequest (line 37) | class ChatRequest(BaseModel):
method check_messages (line 45) | def check_messages(cls, messages: List[ChatMessage]) -> List[ChatMessa...
function chats (line 61) | def chats(
function list_chats (line 99) | def list_chats(
function get_chat (line 111) | def get_chat(session: SessionDep, user: OptionalUserDep, chat_id: UUID):
function update_chat (line 124) | def update_chat(
function delete_chat (line 144) | def delete_chat(session: SessionDep, user: CurrentUserDep, chat_id: UUID):
function get_chat_subgraph (line 165) | def get_chat_subgraph(session: SessionDep, user: OptionalUserDep, chat_m...
function get_recommended_questions (line 184) | def get_recommended_questions(
function refresh_recommended_questions (line 204) | def refresh_recommended_questions(
FILE: backend/app/api/routes/chat_engine.py
function list_chat_engines (line 17) | def list_chat_engines(
function get_chat_engine (line 29) | def get_chat_engine(
FILE: backend/app/api/routes/document.py
function download_file (line 11) | def download_file(doc_id: int, session: SessionDep):
FILE: backend/app/api/routes/feedback.py
class FeedbackRequest (line 12) | class FeedbackRequest(BaseModel):
function feedback (line 20) | def feedback(
FILE: backend/app/api/routes/index.py
function status (line 17) | def status(session: SessionDep):
function site_config (line 23) | def site_config() -> dict:
function system_bootstrap_status (line 28) | def system_bootstrap_status(session: SessionDep) -> SystemConfigStatusRe...
FILE: backend/app/api/routes/models.py
class RequiredConfigStatus (line 4) | class RequiredConfigStatus(BaseModel):
class OptionalConfigStatus (line 11) | class OptionalConfigStatus(BaseModel):
class NeedMigrationStatus (line 16) | class NeedMigrationStatus(BaseModel):
class SystemConfigStatusResponse (line 20) | class SystemConfigStatusResponse(BaseModel):
FILE: backend/app/api/routes/retrieve/models.py
class ChunkRetrievalConfig (line 14) | class ChunkRetrievalConfig(FusionRetrievalBaseConfig):
class ChunksRetrievalRequest (line 19) | class ChunksRetrievalRequest(BaseModel):
class KnowledgeGraphRetrievalConfig (line 27) | class KnowledgeGraphRetrievalConfig(FusionRetrievalBaseConfig):
class KnowledgeGraphRetrievalRequest (line 31) | class KnowledgeGraphRetrievalRequest(BaseModel):
FILE: backend/app/api/routes/retrieve/routes.py
function retrieve_chunks (line 24) | def retrieve_chunks(
function retrieve_knowledge_graph (line 48) | def retrieve_knowledge_graph(
FILE: backend/app/api/routes/user.py
function me (line 10) | def me(user: CurrentUserDep):
FILE: backend/app/api_server.py
function custom_generate_unique_id (line 18) | def custom_generate_unique_id(route: APIRoute) -> str:
function lifespan (line 32) | async def lifespan(app: FastAPI):
function identify_browser (line 59) | async def identify_browser(request: Request, call_next):
FILE: backend/app/auth/api_keys.py
function generate_api_key (line 16) | def generate_api_key(length=50):
function get_api_key_from_request (line 26) | def get_api_key_from_request(request: Request) -> str | None:
function encrypt_api_key (line 37) | def encrypt_api_key(api_key: str) -> str:
class ApiKeyManager (line 51) | class ApiKeyManager:
method acreate_api_key (line 52) | async def acreate_api_key(
method create_api_key (line 68) | def create_api_key(
method get_active_user_by_raw_api_key (line 84) | async def get_active_user_by_raw_api_key(
method get_active_user_from_request (line 105) | async def get_active_user_from_request(
method list_api_keys (line 111) | async def list_api_keys(
method delete_api_key (line 123) | async def delete_api_key(self, session: AsyncSession, user: User, api_...
FILE: backend/app/auth/db.py
function get_user_db (line 10) | async def get_user_db(session: AsyncSession = Depends(get_db_async_sessi...
function get_user_session_db (line 14) | async def get_user_session_db(
FILE: backend/app/auth/schemas.py
class UserRead (line 6) | class UserRead(schemas.BaseUser[uuid.UUID]):
class UserCreate (line 10) | class UserCreate(schemas.BaseUserCreate):
class UserUpdate (line 14) | class UserUpdate(schemas.BaseUserUpdate):
FILE: backend/app/auth/users.py
class UserManager (line 29) | class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
method on_after_register (line 33) | async def on_after_register(self, user: User, request: Optional[Reques...
method on_after_forgot_password (line 36) | async def on_after_forgot_password(
method on_after_request_verify (line 41) | async def on_after_request_verify(
function get_user_manager (line 47) | async def get_user_manager(user_db: SQLModelUserDatabaseAsync = Depends(...
function get_database_strategy (line 58) | def get_database_strategy(
function current_user (line 78) | async def current_user(
function current_superuser (line 96) | async def current_superuser(
function optional_current_user (line 117) | async def optional_current_user(
function create_user (line 136) | async def create_user(
function update_user_password (line 162) | async def update_user_password(
FILE: backend/app/core/config.py
function parse_cors (line 19) | def parse_cors(v: Any) -> list[str] | str:
class Environment (line 27) | class Environment(str, enum.Enum):
class Settings (line 33) | class Settings(BaseSettings):
method server_host (line 54) | def server_host(self) -> str:
method _validate_sentry_sample_rate (line 71) | def _validate_sentry_sample_rate(self) -> Self:
method SQLALCHEMY_DATABASE_URI (line 110) | def SQLALCHEMY_DATABASE_URI(self) -> MySQLDsn:
method SQLALCHEMY_ASYNC_DATABASE_URI (line 127) | def SQLALCHEMY_ASYNC_DATABASE_URI(self) -> MySQLDsn:
method _validate_secrets (line 138) | def _validate_secrets(self) -> Self:
FILE: backend/app/core/db.py
function get_ssl_context (line 28) | def get_ssl_context():
function prepare_db_connection (line 48) | def prepare_db_connection(dbapi_connection, connection_record):
function get_db_session (line 60) | def get_db_session() -> Generator[Session, None, None]:
function get_db_async_session (line 65) | async def get_db_async_session() -> AsyncGenerator[AsyncSession, None]:
FILE: backend/app/evaluation/evals.py
class Evaluation (line 40) | class Evaluation:
method __init__ (line 57) | def __init__(
method runeval_dataset (line 92) | def runeval_dataset(
method run (line 193) | def run(self, metrics: list = DEFAULT_METRICS) -> None:
method parse_sample (line 232) | def parse_sample(self, item: DatasetItemClient):
method _generate_answer_by_tidb_ai (line 264) | def _generate_answer_by_tidb_ai(self, messages: list) -> (str, str):
method generate_answer_by_tidb_ai (line 290) | def generate_answer_by_tidb_ai(self, messages: list) -> str:
function parse_langfuse_trace_id_from_url (line 310) | def parse_langfuse_trace_id_from_url(trace_url: str) -> str:
function fetch_rag_data (line 315) | def fetch_rag_data(langfuse_client: Langfuse, tracing_id: str):
FILE: backend/app/evaluation/evaluators/e2e_rag_evaluator.py
class E2ERagEvaluator (line 18) | class E2ERagEvaluator:
method __init__ (line 19) | def __init__(self, model="gpt-4o", threshold=0.7) -> None:
method evaluate (line 39) | def evaluate(
FILE: backend/app/evaluation/evaluators/language_detector.py
class EvaluationData (line 25) | class EvaluationData(BaseModel):
class LanguageEvaluator (line 31) | class LanguageEvaluator(BaseEvaluator):
method __init__ (line 47) | def __init__(
method _get_prompts (line 68) | def _get_prompts(self) -> PromptDictType:
method _update_prompts (line 74) | def _update_prompts(self, prompts: PromptDictType) -> None:
method aevaluate (line 79) | async def aevaluate(
FILE: backend/app/evaluation/evaluators/toxicity.py
class ToxicityEvaluator (line 62) | class ToxicityEvaluator(BaseEvaluator):
method __init__ (line 63) | def __init__(
method _get_prompts (line 83) | def _get_prompts(self) -> PromptDictType:
method _update_prompts (line 89) | def _update_prompts(self, prompts: PromptDictType) -> None:
method aevaluate (line 94) | async def aevaluate(
FILE: backend/app/exceptions.py
class InternalServerError (line 9) | class InternalServerError(HTTPException):
method __init__ (line 10) | def __init__(self):
class ChatException (line 17) | class ChatException(HTTPException):
class ChatNotFound (line 21) | class ChatNotFound(ChatException):
method __init__ (line 24) | def __init__(self, chat_id: UUID):
class ChatMessageNotFound (line 28) | class ChatMessageNotFound(ChatException):
method __init__ (line 31) | def __init__(self, message_id: int):
class LLMException (line 38) | class LLMException(HTTPException):
class LLMNotFound (line 42) | class LLMNotFound(LLMException):
method __init__ (line 45) | def __init__(self, llm_id: int):
class DefaultLLMNotFound (line 49) | class DefaultLLMNotFound(LLMException):
method __init__ (line 52) | def __init__(self):
class EmbeddingModelException (line 59) | class EmbeddingModelException(HTTPException):
class EmbeddingModelNotFound (line 63) | class EmbeddingModelNotFound(EmbeddingModelException):
method __init__ (line 66) | def __init__(self, model_id: int):
class DefaultEmbeddingModelNotFound (line 70) | class DefaultEmbeddingModelNotFound(EmbeddingModelException):
method __init__ (line 73) | def __init__(self):
class RerankerModelException (line 80) | class RerankerModelException(HTTPException):
class RerankerModelNotFound (line 84) | class RerankerModelNotFound(RerankerModelException):
method __init__ (line 87) | def __init__(self, model_id: int):
class DefaultRerankerModelNotFound (line 91) | class DefaultRerankerModelNotFound(RerankerModelException):
method __init__ (line 94) | def __init__(self):
class KBException (line 101) | class KBException(HTTPException):
class KBNotFound (line 105) | class KBNotFound(KBException):
method __init__ (line 108) | def __init__(self, knowledge_base_id: int):
class KBDataSourceNotFound (line 112) | class KBDataSourceNotFound(KBException):
method __init__ (line 115) | def __init__(self, kb_id: int, data_source_id: int):
class KBNoLLMConfigured (line 121) | class KBNoLLMConfigured(KBException):
method __init__ (line 124) | def __init__(self):
class KBNoEmbedModelConfigured (line 128) | class KBNoEmbedModelConfigured(KBException):
method __init__ (line 131) | def __init__(self):
class KBNoVectorIndexConfigured (line 135) | class KBNoVectorIndexConfigured(KBException):
method __init__ (line 138) | def __init__(self):
class KBNotAllowedUpdateEmbedModel (line 142) | class KBNotAllowedUpdateEmbedModel(KBException):
method __init__ (line 145) | def __init__(self):
class KBIsUsedByChatEngines (line 149) | class KBIsUsedByChatEngines(KBException):
method __init__ (line 152) | def __init__(self, kb_id, chat_engines_num: int):
class DocumentException (line 159) | class DocumentException(HTTPException):
class DocumentNotFound (line 163) | class DocumentNotFound(DocumentException):
method __init__ (line 166) | def __init__(self, document_id: int):
class ChatEngineException (line 173) | class ChatEngineException(HTTPException):
class ChatEngineNotFound (line 177) | class ChatEngineNotFound(ChatEngineException):
method __init__ (line 180) | def __init__(self, chat_engine_id: int):
class DefaultChatEngineCannotBeDeleted (line 184) | class DefaultChatEngineCannotBeDeleted(ChatEngineException):
method __init__ (line 187) | def __init__(self, chat_engine_id: int):
FILE: backend/app/experiments/sql_extraction.py
class SampleExtract (line 11) | class SampleExtract(dspy.Signature):
class SQLExtractModule (line 37) | class SQLExtractModule(dspy.Module):
method __init__ (line 38) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 43) | def forward(self, QA_content: str):
class SQlExtractor (line 48) | class SQlExtractor:
method __init__ (line 49) | def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[s...
method gen (line 54) | def gen(self, QA_content: str) -> SQLSample:
FILE: backend/app/experiments/sql_sample_gen.py
class Step (line 10) | class Step(BaseModel):
class SQLSample (line 15) | class SQLSample(BaseModel):
class SampleGen (line 24) | class SampleGen(dspy.Signature):
class SQLGenModule (line 50) | class SQLGenModule(dspy.Module):
method __init__ (line 51) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 56) | def forward(self, QA_content: str):
class SQlGenerator (line 61) | class SQlGenerator:
method __init__ (line 62) | def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[s...
method gen (line 67) | def gen(self, QA_content: str) -> SQLSample:
FILE: backend/app/file_storage/__init__.py
function get_file_storage (line 5) | def get_file_storage() -> FileStorage:
FILE: backend/app/file_storage/base.py
class FileStorage (line 6) | class FileStorage(ABC):
method open (line 8) | def open(self, name: str, mode: str = "rb") -> IO:
method save (line 12) | def save(self, name: str, content: IO) -> None:
method delete (line 16) | def delete(self, name: str) -> None:
method exists (line 20) | def exists(self, name: str) -> bool:
method size (line 24) | def size(self, name: str) -> int:
FILE: backend/app/file_storage/local.py
class LocalFileStorage (line 8) | class LocalFileStorage(FileStorage):
method path (line 9) | def path(self, name: str) -> str:
method open (line 12) | def open(self, name: str, mode: str = "rb") -> IO:
method save (line 15) | def save(self, name: str, content: IO) -> None:
method delete (line 21) | def delete(self, name: str) -> None:
method exists (line 24) | def exists(self, name: str) -> bool:
method size (line 27) | def size(self, name: str) -> int:
FILE: backend/app/models/api_key.py
class BaseApiKey (line 12) | class BaseApiKey(UpdatableBaseModel):
class ApiKey (line 19) | class ApiKey(BaseApiKey, table=True):
class PublicApiKey (line 32) | class PublicApiKey(BaseApiKey):
FILE: backend/app/models/auth.py
class User (line 17) | class User(UUIDBaseModel, UpdatableBaseModel, table=True):
class UserSession (line 27) | class UserSession(SQLModel, table=True):
FILE: backend/app/models/base.py
class UUIDBaseModel (line 13) | class UUIDBaseModel(SQLModel):
class UpdatableBaseModel (line 22) | class UpdatableBaseModel(SQLModel):
function get_aes_key (line 36) | def get_aes_key() -> bytes:
class AESEncryptedColumn (line 40) | class AESEncryptedColumn(TypeDecorator):
method process_bind_param (line 43) | def process_bind_param(self, value, dialect):
method process_result_value (line 49) | def process_result_value(self, value, dialect):
class IntEnumType (line 56) | class IntEnumType(TypeDecorator):
method __init__ (line 69) | def __init__(self, enum_class, *args, **kwargs):
method process_bind_param (line 73) | def process_bind_param(self, value, dialect):
method process_result_value (line 81) | def process_result_value(self, value, dialect):
FILE: backend/app/models/chat.py
class ChatVisibility (line 18) | class ChatVisibility(int, enum.Enum):
class Chat (line 23) | class Chat(UUIDBaseModel, UpdatableBaseModel, table=True):
class ChatUpdate (line 55) | class ChatUpdate(BaseModel):
class ChatFilters (line 60) | class ChatFilters(BaseModel):
class ChatOrigin (line 70) | class ChatOrigin(BaseModel):
FILE: backend/app/models/chat_engine.py
class ChatEngine (line 16) | class ChatEngine(UpdatableBaseModel, table=True):
class ChatEngineUpdate (line 45) | class ChatEngineUpdate(BaseModel):
FILE: backend/app/models/chat_message.py
class ChatMessage (line 19) | class ChatMessage(UpdatableBaseModel, table=True):
FILE: backend/app/models/chunk.py
class KgIndexStatus (line 24) | class KgIndexStatus(str, enum.Enum):
function get_kb_chunk_model (line 32) | def get_kb_chunk_model(kb: KnowledgeBase) -> Type[SQLModel]:
function get_dynamic_chunk_model (line 38) | def get_dynamic_chunk_model(
FILE: backend/app/models/data_source.py
class DataSourceType (line 19) | class DataSourceType(str, Enum):
class DataSource (line 25) | class DataSource(UpdatableBaseModel, table=True):
FILE: backend/app/models/document.py
class DocIndexTaskStatus (line 22) | class DocIndexTaskStatus(str, enum.Enum):
class ContentFormat (line 30) | class ContentFormat(str, enum.Enum):
class Document (line 35) | class Document(UpdatableBaseModel, table=True):
method to_llama_document (line 73) | def to_llama_document(self) -> LlamaDocument:
FILE: backend/app/models/embed_model.py
class EmbeddingModel (line 12) | class EmbeddingModel(UpdatableBaseModel, table=True):
FILE: backend/app/models/entity.py
class EntityType (line 21) | class EntityType(str, enum.Enum):
method __str__ (line 25) | def __str__(self):
class EntityPublic (line 29) | class EntityPublic(BaseModel):
function get_kb_entity_model (line 38) | def get_kb_entity_model(kb: KnowledgeBase) -> Type[SQLModel]:
function get_dynamic_entity_model (line 44) | def get_dynamic_entity_model(
FILE: backend/app/models/evaluation_dataset.py
class EvaluationDataset (line 15) | class EvaluationDataset(UpdatableBaseModel, table=True):
class EvaluationDatasetItem (line 34) | class EvaluationDatasetItem(UpdatableBaseModel, table=True):
FILE: backend/app/models/evaluation_task.py
class EvaluationStatus (line 17) | class EvaluationStatus(str, enum.Enum):
class EvaluationTask (line 25) | class EvaluationTask(UpdatableBaseModel, table=True):
class EvaluationTaskItem (line 46) | class EvaluationTaskItem(UpdatableBaseModel, table=True):
FILE: backend/app/models/feedback.py
class FeedbackType (line 15) | class FeedbackType(str, enum.Enum):
method adjust_relationship_weight (line 20) | def adjust_relationship_weight(cls, feedback_type):
class BaseFeedback (line 25) | class BaseFeedback(UpdatableBaseModel):
class Feedback (line 34) | class Feedback(BaseFeedback, table=True):
class AdminFeedbackPublic (line 61) | class AdminFeedbackPublic(BaseFeedback):
class FeedbackFilters (line 70) | class FeedbackFilters(BaseModel):
class FeedbackOrigin (line 79) | class FeedbackOrigin(BaseModel):
FILE: backend/app/models/knowledge_base.py
class IndexMethod (line 36) | class IndexMethod(str, enum.Enum):
class KnowledgeBaseDataSource (line 41) | class KnowledgeBaseDataSource(SQLModel, table=True):
class ChunkSplitter (line 51) | class ChunkSplitter(str, enum.Enum):
class SentenceSplitterOptions (line 56) | class SentenceSplitterOptions(BaseModel):
class MarkdownNodeParserOptions (line 73) | class MarkdownNodeParserOptions(BaseModel):
class ChunkSplitterConfig (line 87) | class ChunkSplitterConfig(BaseModel):
class ChunkingMode (line 94) | class ChunkingMode(str, enum.Enum):
class BaseChunkingConfig (line 99) | class BaseChunkingConfig(BaseModel):
class GeneralChunkingConfig (line 103) | class GeneralChunkingConfig(BaseChunkingConfig):
class AdvancedChunkingConfig (line 110) | class AdvancedChunkingConfig(BaseChunkingConfig):
class KnowledgeBase (line 120) | class KnowledgeBase(SQLModel, table=True):
method __init__ (line 178) | def __init__(self, **kwargs):
method __hash__ (line 182) | def __hash__(self):
method get_data_source_by_id (line 185) | def get_data_source_by_id(self, data_source_id: int) -> Optional[DataS...
method must_get_data_source_by_id (line 195) | def must_get_data_source_by_id(self, data_source_id: int) -> DataSource:
method to_descriptor (line 201) | def to_descriptor(self) -> KnowledgeBaseDescriptor:
FILE: backend/app/models/knowledge_base_scoped/table_naming.py
function get_kb_chunks_table_name (line 22) | def get_kb_chunks_table_name(knowledge_base: KnowledgeBase) -> str:
function get_kb_relationships_table_name (line 30) | def get_kb_relationships_table_name(knowledge_base: KnowledgeBase) -> str:
function get_kb_entities_table_name (line 38) | def get_kb_entities_table_name(knowledge_base: KnowledgeBase) -> str:
function get_kb_vector_dims (line 46) | def get_kb_vector_dims(kb: KnowledgeBase):
FILE: backend/app/models/llm.py
class BaseLLM (line 8) | class BaseLLM(UpdatableBaseModel):
class LLM (line 16) | class LLM(BaseLLM, table=True):
class AdminLLM (line 23) | class AdminLLM(BaseLLM):
class LLMUpdate (line 27) | class LLMUpdate(BaseModel):
FILE: backend/app/models/recommend_question.py
class RecommendQuestion (line 13) | class RecommendQuestion(UpdatableBaseModel, table=True):
FILE: backend/app/models/relationship.py
class RelationshipPublic (line 21) | class RelationshipPublic(BaseModel):
function get_kb_relationship_model (line 33) | def get_kb_relationship_model(kb: KnowledgeBase) -> Type[SQLModel]:
function get_dynamic_relationship_model (line 40) | def get_dynamic_relationship_model(
FILE: backend/app/models/reranker_model.py
class BaseRerankerModel (line 10) | class BaseRerankerModel(UpdatableBaseModel):
class RerankerModel (line 19) | class RerankerModel(BaseRerankerModel, table=True):
class AdminRerankerModel (line 26) | class AdminRerankerModel(BaseRerankerModel):
class RerankerModelUpdate (line 30) | class RerankerModelUpdate(BaseModel):
FILE: backend/app/models/semantic_cache.py
class SemanticCache (line 18) | class SemanticCache(SQLModel, table=True):
method __hash__ (line 48) | def __hash__(self):
method screenshot (line 53) | def screenshot(self):
FILE: backend/app/models/site_setting.py
class SiteSetting (line 8) | class SiteSetting(SQLModel, table=True):
FILE: backend/app/models/staff_action_log.py
class StaffActionLog (line 7) | class StaffActionLog(SQLModel, table=True):
FILE: backend/app/models/upload.py
class Upload (line 15) | class Upload(UpdatableBaseModel, table=True):
FILE: backend/app/rag/build_index.py
class IndexService (line 34) | class IndexService:
method __init__ (line 39) | def __init__(
method build_vector_index_for_document (line 51) | def build_vector_index_for_document(
method _get_transformations (line 79) | def _get_transformations(
method build_kg_index_for_chunk (line 138) | def build_kg_index_for_chunk(self, session: Session, db_chunk: Type[SQ...
FILE: backend/app/rag/chat/chat_flow.py
function parse_chat_messages (line 43) | def parse_chat_messages(
class ChatFlow (line 51) | class ChatFlow:
method __init__ (line 54) | def __init__(
method chat (line 167) | def chat(self) -> Generator[ChatEvent | str, None, None]:
method _builtin_chat (line 200) | def _builtin_chat(
method _chat_start (line 260) | def _chat_start(
method _search_knowledge_graph (line 291) | def _search_knowledge_graph(
method _refine_user_question (line 334) | def _refine_user_question(
method _clarify_question (line 381) | def _clarify_question(
method _search_relevance_chunks (line 436) | def _search_relevance_chunks(
method _generate_answer (line 460) | def _generate_answer(
method _post_verification (line 526) | def _post_verification(
method _chat_finish (line 579) | def _chat_finish(
method _external_chat (line 628) | def _external_chat(self) -> Generator[ChatEvent | str, None, None]:
method _generate_goal (line 800) | def _generate_goal(self) -> Generator[ChatEvent, None, Tuple[str, dict]]:
FILE: backend/app/rag/chat/chat_service.py
class ChatResult (line 53) | class ChatResult(BaseModel):
function get_final_chat_result (line 61) | def get_final_chat_result(
function user_can_view_chat (line 94) | def user_can_view_chat(chat: DBChat, user: Optional[User]) -> bool:
function user_can_edit_chat (line 102) | def user_can_edit_chat(chat: DBChat, user: Optional[User]) -> bool:
function get_graph_data_from_chat_message (line 110) | def get_graph_data_from_chat_message(
function get_chat_message_subgraph (line 176) | def get_chat_message_subgraph(
function check_rag_required_config (line 206) | def check_rag_required_config(session: Session) -> RequiredConfigStatus:
function check_rag_optional_config (line 224) | def check_rag_optional_config(session: Session) -> OptionalConfigStatus:
function check_rag_config_need_migration (line 237) | def check_rag_config_need_migration(session: Session) -> NeedMigrationSt...
function remove_chat_message_recommend_questions (line 257) | def remove_chat_message_recommend_questions(
function get_chat_message_recommend_questions (line 268) | def get_chat_message_recommend_questions(
FILE: backend/app/rag/chat/config.py
class LLMOption (line 39) | class LLMOption(BaseModel):
class VectorSearchOption (line 49) | class VectorSearchOption(VectorSearchRetrieverConfig):
class KnowledgeGraphOption (line 53) | class KnowledgeGraphOption(KnowledgeGraphRetrieverConfig):
class ExternalChatEngine (line 58) | class ExternalChatEngine(BaseModel):
class LinkedKnowledgeBase (line 63) | class LinkedKnowledgeBase(BaseModel):
class KnowledgeBaseOption (line 67) | class KnowledgeBaseOption(BaseModel):
class ChatEngineConfig (line 74) | class ChatEngineConfig(BaseModel):
method is_external_engine (line 97) | def is_external_engine(self) -> bool:
method get_db_chat_engine (line 103) | def get_db_chat_engine(self) -> Optional[DBChatEngine]:
method get_linked_knowledge_base (line 106) | def get_linked_knowledge_base(self, session: Session) -> KnowledgeBase...
method load_from_db (line 114) | def load_from_db(cls, session: Session, engine_name: str) -> "ChatEngi...
method get_llama_llm (line 133) | def get_llama_llm(self, session: Session) -> LLM:
method get_dspy_lm (line 143) | def get_dspy_lm(self, session: Session) -> dspy.LM:
method get_fast_llama_llm (line 147) | def get_fast_llama_llm(self, session: Session) -> LLM:
method get_fast_dspy_lm (line 157) | def get_fast_dspy_lm(self, session: Session) -> dspy.LM:
method get_reranker (line 162) | def get_reranker(
method get_metadata_filter (line 177) | def get_metadata_filter(self) -> BaseNodePostprocessor:
method get_knowledge_bases (line 180) | def get_knowledge_bases(self, db_session: Session) -> List[KnowledgeBa...
method screenshot (line 196) | def screenshot(self) -> dict:
FILE: backend/app/rag/chat/retrieve/retrieve_flow.py
class SourceDocument (line 31) | class SourceDocument(BaseModel):
class RetrieveFlow (line 37) | class RetrieveFlow:
method __init__ (line 38) | def __init__(
method retrieve (line 66) | def retrieve(self, user_question: str) -> List[NodeWithScore]:
method retrieve_documents (line 77) | def retrieve_documents(self, user_question: str) -> List[DBDocument]:
method search_knowledge_graph (line 81) | def search_knowledge_graph(
method _get_knowledge_graph_context (line 101) | def _get_knowledge_graph_context(
method _refine_user_question (line 120) | def _refine_user_question(
method search_relevant_chunks (line 134) | def search_relevant_chunks(self, user_question: str) -> List[NodeWithS...
method get_documents_from_nodes (line 144) | def get_documents_from_nodes(self, nodes: List[NodeWithScore]) -> List...
method get_source_documents_from_nodes (line 150) | def get_source_documents_from_nodes(
FILE: backend/app/rag/chat/stream_protocol.py
class ChatStreamPayload (line 10) | class ChatStreamPayload:
method dump (line 11) | def dump(self):
class ChatStreamDataPayload (line 16) | class ChatStreamDataPayload(ChatStreamPayload):
method dump (line 21) | def dump(self):
class ChatStreamMessagePayload (line 32) | class ChatStreamMessagePayload(ChatStreamPayload):
method dump (line 38) | def dump(self):
class ChatEvent (line 57) | class ChatEvent:
method encode (line 61) | def encode(self, charset) -> bytes:
FILE: backend/app/rag/datasource/__init__.py
function get_data_source_loader (line 12) | def get_data_source_loader(
FILE: backend/app/rag/datasource/base.py
class BaseDataSource (line 9) | class BaseDataSource(ABC):
method __init__ (line 16) | def __init__(
method validate_config (line 33) | def validate_config(self):
method load_documents (line 37) | def load_documents(self) -> Generator[Document, None, None]:
FILE: backend/app/rag/datasource/file.py
class FileConfig (line 17) | class FileConfig(BaseModel):
class FileDataSource (line 21) | class FileDataSource(BaseDataSource):
method validate_config (line 22) | def validate_config(self):
method load_documents (line 28) | def load_documents(self) -> Generator[Document, None, None]:
function extract_text_from_pdf (line 70) | def extract_text_from_pdf(file: IO) -> str:
function extract_text_from_docx (line 75) | def extract_text_from_docx(file: IO) -> str:
function extract_text_from_pptx (line 83) | def extract_text_from_pptx(file: IO) -> str:
function extract_text_from_xlsx (line 93) | def extract_text_from_xlsx(file: IO) -> str:
FILE: backend/app/rag/datasource/web_base.py
function load_web_documents (line 14) | def load_web_documents(
FILE: backend/app/rag/datasource/web_single_page.py
class WebSinglePageConfig (line 12) | class WebSinglePageConfig(BaseModel):
class WebSinglePageDataSource (line 16) | class WebSinglePageDataSource(BaseDataSource):
method validate_config (line 17) | def validate_config(self):
method load_documents (line 20) | def load_documents(self) -> Generator[Document, None, None]:
FILE: backend/app/rag/datasource/web_sitemap.py
class WebSitemapConfig (line 16) | class WebSitemapConfig(BaseModel):
function _ensure_absolute_url (line 20) | def _ensure_absolute_url(source_url: str, maybe_relative_url: str) -> str:
function extract_urls_from_sitemap (line 26) | def extract_urls_from_sitemap(sitemap_url: str) -> list[str]:
class WebSitemapDataSource (line 40) | class WebSitemapDataSource(BaseDataSource):
method validate_config (line 41) | def validate_config(self):
method load_documents (line 44) | def load_documents(self) -> Generator[Document, None, None]:
FILE: backend/app/rag/embeddings/local/local_embedding.py
class _APICaller (line 14) | class _APICaller:
method __init__ (line 15) | def __init__(
method get_embeddings (line 27) | def get_embeddings(self, sentences: list[str]) -> List[List[float]]:
method aget_embeddings (line 47) | async def aget_embeddings(self, sentences: list[str]) -> List[List[flo...
class LocalEmbedding (line 64) | class LocalEmbedding(BaseEmbedding):
method __init__ (line 74) | def __init__(
method class_name (line 94) | def class_name(cls) -> str:
method _get_query_embedding (line 97) | def _get_query_embedding(self, query: str) -> List[float]:
method _aget_query_embedding (line 101) | async def _aget_query_embedding(self, query: str) -> List[float]:
method _get_text_embedding (line 106) | def _get_text_embedding(self, text: str) -> List[float]:
method _aget_text_embedding (line 110) | async def _aget_text_embedding(self, text: str) -> List[float]:
method _get_text_embeddings (line 115) | def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
method _aget_text_embeddings (line 118) | async def _aget_text_embeddings(
FILE: backend/app/rag/embeddings/open_like/openai_like_embedding.py
class OpenAILikeEmbedding (line 10) | class OpenAILikeEmbedding(BaseEmbedding):
method __init__ (line 21) | def __init__(
method get_embeddings (line 42) | def get_embeddings(self, sentences: list[str]) -> List[List[float]]:
method aget_embeddings (line 52) | async def aget_embeddings(self, sentences: list[str]) -> List[List[flo...
method class_name (line 62) | def class_name(cls) -> str:
method _get_query_embedding (line 65) | def _get_query_embedding(self, query: str) -> List[float]:
method _aget_query_embedding (line 69) | async def _aget_query_embedding(self, query: str) -> List[float]:
method _get_text_embedding (line 74) | def _get_text_embedding(self, text: str) -> List[float]:
method _aget_text_embedding (line 78) | async def _aget_text_embedding(self, text: str) -> List[float]:
method _get_text_embeddings (line 83) | def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
method _aget_text_embeddings (line 86) | async def _aget_text_embeddings(
FILE: backend/app/rag/embeddings/provider.py
class EmbeddingProvider (line 7) | class EmbeddingProvider(str, enum.Enum):
class EmbeddingProviderOption (line 19) | class EmbeddingProviderOption(BaseModel):
FILE: backend/app/rag/embeddings/resolver.py
function resolve_embed_model (line 20) | def resolve_embed_model(
function get_default_embed_model (line 86) | def get_default_embed_model(session: Session) -> Optional[BaseEmbedding]:
function must_get_default_embed_model (line 98) | def must_get_default_embed_model(session: Session) -> BaseEmbedding:
FILE: backend/app/rag/indices/knowledge_graph/base.py
class KnowledgeGraphIndex (line 21) | class KnowledgeGraphIndex(BaseIndex[IndexLPG]):
method __init__ (line 42) | def __init__(
method from_existing (line 60) | def from_existing(
method _insert_nodes (line 82) | def _insert_nodes(self, nodes: Sequence[BaseNode]):
method _build_index_from_nodes (line 95) | def _build_index_from_nodes(self, nodes: Optional[Sequence[BaseNode]])...
method as_retriever (line 100) | def as_retriever(self, **kwargs: Any):
method _insert (line 108) | def _insert(self, nodes: Sequence[BaseNode], **insert_kwargs: Any) -> ...
method ref_doc_info (line 112) | def ref_doc_info(self) -> Dict[str, RefDocInfo]:
method _delete_node (line 119) | def _delete_node(self, node_id: str, **delete_kwargs: Any) -> None:
FILE: backend/app/rag/indices/knowledge_graph/extractor.py
class ExtractGraphTriplet (line 21) | class ExtractGraphTriplet(dspy.Signature):
class ExtractCovariate (line 61) | class ExtractCovariate(dspy.Signature):
function get_relation_metadata_from_node (line 83) | def get_relation_metadata_from_node(node: BaseNode):
class Extractor (line 97) | class Extractor(dspy.Module):
method __init__ (line 98) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 104) | def forward(self, text):
class SimpleGraphExtractor (line 131) | class SimpleGraphExtractor:
method __init__ (line 132) | def __init__(
method extract (line 139) | def extract(self, text: str, node: BaseNode):
method _to_df (line 152) | def _to_df(
FILE: backend/app/rag/indices/knowledge_graph/graph_store/helpers.py
function get_weight_score (line 30) | def get_weight_score(
function get_degree_score (line 47) | def get_degree_score(in_degree: int, out_degree: int, degree_coefficient...
function calculate_relationship_score (line 51) | def calculate_relationship_score(
function get_default_embed_model (line 70) | def get_default_embed_model() -> BaseEmbedding:
function get_query_embedding (line 74) | def get_query_embedding(query: str, embed_model: BaseEmbedding = None) -...
function get_text_embedding (line 80) | def get_text_embedding(text: str, embed_model: BaseEmbedding = None) -> ...
function get_entity_description_embedding (line 86) | def get_entity_description_embedding(
function get_entity_metadata_embedding (line 93) | def get_entity_metadata_embedding(
function get_relationship_description_embedding (line 100) | def get_relationship_description_embedding(
FILE: backend/app/rag/indices/knowledge_graph/graph_store/schema.py
class KnowledgeGraphStore (line 7) | class KnowledgeGraphStore(ABC):
method save (line 9) | def save(self, entities_df, relationships_df) -> None:
method retrieve_with_weight (line 14) | def retrieve_with_weight(
FILE: backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_editor.py
class TiDBGraphEditor (line 22) | class TiDBGraphEditor:
method __init__ (line 26) | def __init__(
method get_entity (line 44) | def get_entity(self, session: Session, entity_id: int) -> Optional[SQL...
method update_entity (line 47) | def update_entity(
method get_entity_subgraph (line 87) | def get_entity_subgraph(
method get_relationship (line 117) | def get_relationship(
method update_relationship (line 122) | def update_relationship(
method search_similar_entities (line 152) | def search_similar_entities(
method create_synopsis_entity (line 163) | def create_synopsis_entity(
FILE: backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_store.py
function cosine_distance (line 49) | def cosine_distance(v1, v2):
class MergeEntities (line 53) | class MergeEntities(dspy.Signature):
class MergeEntitiesProgram (line 71) | class MergeEntitiesProgram(dspy.Module):
method __init__ (line 72) | def __init__(self):
method forward (line 75) | def forward(self, entities: List[Entity]):
class TiDBGraphStore (line 83) | class TiDBGraphStore(KnowledgeGraphStore):
method __init__ (line 84) | def __init__(
method ensure_table_schema (line 117) | def ensure_table_schema(self) -> None:
method drop_table_schema (line 163) | def drop_table_schema(self) -> None:
method close_session (line 193) | def close_session(self) -> None:
method save (line 198) | def save(self, chunk_id, entities_df, relationships_df):
method create_relationship (line 284) | def create_relationship(
method get_subgraph_by_relationship_ids (line 315) | def get_subgraph_by_relationship_ids(
method get_or_create_entity (line 367) | def get_or_create_entity(self, entity: Entity, commit: bool = True) ->...
method _try_merge_entities (line 468) | def _try_merge_entities(self, entities: List[Entity]) -> Entity:
method retrieve_with_weight (line 478) | def retrieve_with_weight(
method fetch_entity_degrees (line 591) | def fetch_entity_degrees(
method search_relationships_weight (line 632) | def search_relationships_weight(
method fetch_similar_entities_by_post_filter (line 755) | def fetch_similar_entities_by_post_filter(
method fetch_similar_entities (line 796) | def fetch_similar_entities(
method retrieve_graph_data (line 833) | def retrieve_graph_data(
method retrieve_neighbors (line 934) | def retrieve_neighbors(
method get_chunks_by_relationships (line 1052) | def get_chunks_by_relationships(
method get_entire_knowledge_graph (line 1129) | def get_entire_knowledge_graph(self) -> RetrievedKnowledgeGraph:
method stream_entire_knowledge_graph (line 1186) | def stream_entire_knowledge_graph(self, chunk_size: int = 5000):
FILE: backend/app/rag/indices/knowledge_graph/schema.py
class Entity (line 5) | class Entity(BaseModel):
class EntityWithID (line 25) | class EntityWithID(Entity):
class SynopsisInfo (line 31) | class SynopsisInfo(BaseModel):
class SynopsisEntity (line 42) | class SynopsisEntity(Entity):
class ExistingSynopsisEntity (line 50) | class ExistingSynopsisEntity(SynopsisEntity):
class Relationship (line 56) | class Relationship(BaseModel):
class RelationshipReasoning (line 73) | class RelationshipReasoning(Relationship):
class KnowledgeGraph (line 83) | class KnowledgeGraph(BaseModel):
class EntityCovariateInput (line 94) | class EntityCovariateInput(BaseModel):
class EntityCovariateOutput (line 101) | class EntityCovariateOutput(BaseModel):
class DecomposedFactors (line 113) | class DecomposedFactors(BaseModel):
FILE: backend/app/rag/indices/vector_search/vector_store/tidb_vector_store.py
function node_to_relation_dict (line 32) | def node_to_relation_dict(node: BaseNode) -> dict:
class TiDBVectorStore (line 44) | class TiDBVectorStore(BasePydanticVectorStore):
method __init__ (line 53) | def __init__(
method ensure_table_schema (line 73) | def ensure_table_schema(self) -> None:
method drop_table_schema (line 93) | def drop_table_schema(self):
method close_session (line 105) | def close_session(self) -> None:
method client (line 111) | def client(self) -> Any:
method class_name (line 116) | def class_name(cls) -> str:
method add (line 119) | def add(self, nodes: List[BaseNode], **add_kwargs: Any) -> List[str]:
method delete (line 149) | def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
method query (line 167) | def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStore...
FILE: backend/app/rag/knowledge_base/config.py
function get_kb_llm (line 15) | def get_kb_llm(session: Session, kb: KnowledgeBase):
function get_kb_dspy_llm (line 25) | def get_kb_dspy_llm(session: Session, kb: KnowledgeBase):
function get_kb_embed_model (line 30) | def get_kb_embed_model(session: Session, kb: KnowledgeBase) -> BaseEmbed...
FILE: backend/app/rag/knowledge_base/index_store.py
function get_kb_tidb_vector_store (line 13) | def get_kb_tidb_vector_store(session: Session, kb: KnowledgeBase) -> TiD...
function init_kb_tidb_vector_store (line 19) | def init_kb_tidb_vector_store(session: Session, kb: KnowledgeBase) -> Ti...
function get_kb_tidb_graph_store (line 25) | def get_kb_tidb_graph_store(session: Session, kb: KnowledgeBase) -> TiDB...
function init_kb_tidb_graph_store (line 45) | def init_kb_tidb_graph_store(session: Session, kb: KnowledgeBase) -> TiD...
function get_kb_tidb_graph_editor (line 51) | def get_kb_tidb_graph_editor(session: Session, kb: KnowledgeBase) -> TiD...
FILE: backend/app/rag/knowledge_base/schema.py
class KBIndexType (line 4) | class KBIndexType(str, Enum):
FILE: backend/app/rag/llms/dspy.py
function get_dspy_lm_by_llama_llm (line 6) | def get_dspy_lm_by_llama_llm(llama_llm: BaseLLM) -> dspy.LM:
function enforce_trailing_slash (line 81) | def enforce_trailing_slash(url: str):
FILE: backend/app/rag/llms/provider.py
class LLMProvider (line 7) | class LLMProvider(str, enum.Enum):
class LLMProviderOption (line 19) | class LLMProviderOption(BaseModel):
FILE: backend/app/rag/llms/resolver.py
function resolve_llm (line 10) | def resolve_llm(
function get_llm_by_id (line 108) | def get_llm_by_id(session: Session, llm_id: int) -> Optional[LLM]:
function must_get_llm_by_id (line 120) | def must_get_llm_by_id(session: Session, llm_id: int) -> LLM:
function get_default_llm (line 130) | def get_default_llm(session: Session) -> Optional[LLM]:
function must_get_default_llm (line 142) | def must_get_default_llm(session: Session) -> LLM:
function get_llm_or_default (line 152) | def get_llm_or_default(session: Session, llm_id: Optional[int]) -> LLM:
FILE: backend/app/rag/node_parser/file/markdown.py
class MarkdownNodeParser (line 17) | class MarkdownNodeParser(NodeParser):
method __init__ (line 38) | def __init__(
method from_defaults (line 58) | def from_defaults(
method class_name (line 79) | def class_name(cls) -> str:
method _parse_nodes (line 83) | def _parse_nodes(
method get_nodes_from_node (line 120) | def get_nodes_from_node(
method _normalize_node_sizes (line 169) | def _normalize_node_sizes(
method _update_metadata (line 243) | def _update_metadata(
method _build_node_from_split (line 261) | def _build_node_from_split(
method _token_size (line 275) | def _token_size(self, text: str) -> int:
FILE: backend/app/rag/postprocessors/metadata_post_filter.py
function simple_filter_to_metadata_filters (line 18) | def simple_filter_to_metadata_filters(filters: SimpleMetadataFilter) -> ...
class MetadataPostFilter (line 34) | class MetadataPostFilter(BaseNodePostprocessor):
method __init__ (line 37) | def __init__(
method _postprocess_nodes (line 48) | def _postprocess_nodes(
method match_all_filters (line 63) | def match_all_filters(self, node: BaseNode) -> bool:
FILE: backend/app/rag/query_dispatcher.py
class QueryDispatcher (line 16) | class QueryDispatcher:
method __init__ (line 17) | def __init__(self, llm: OpenAI, system_prompt: Optional[str] = None):
method route (line 24) | def route(self, query: str, tools: Sequence["BaseTool"]) -> str:
function answer (line 41) | def answer(query: str) -> str:
FILE: backend/app/rag/question_gen/helpers.py
function get_query_bundle_from_chat (line 7) | def get_query_bundle_from_chat(
FILE: backend/app/rag/question_gen/query_decomposer.py
class SubQuestion (line 9) | class SubQuestion(BaseModel):
class SubQuestions (line 20) | class SubQuestions(BaseModel):
class DecomposeQuery (line 28) | class DecomposeQuery(dspy.Signature):
class DecomposeQueryModule (line 59) | class DecomposeQueryModule(dspy.Module):
method __init__ (line 60) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 65) | def forward(self, query):
class QueryDecomposer (line 70) | class QueryDecomposer:
method __init__ (line 71) | def __init__(self, dspy_lm: dspy.LM, complied_program_path: Optional[s...
method decompose (line 76) | def decompose(self, query: str) -> SubQuestions:
FILE: backend/app/rag/rerankers/baisheng/baisheng_reranker.py
class BaishengRerank (line 17) | class BaishengRerank(BaseNodePostprocessor):
method __init__ (line 32) | def __init__(
method class_name (line 47) | def class_name(cls) -> str:
method _postprocess_nodes (line 50) | def _postprocess_nodes(
FILE: backend/app/rag/rerankers/local/local_reranker.py
class LocalRerank (line 17) | class LocalRerank(BaseNodePostprocessor):
method __init__ (line 31) | def __init__(
method class_name (line 43) | def class_name(cls) -> str:
method _postprocess_nodes (line 46) | def _postprocess_nodes(
FILE: backend/app/rag/rerankers/provider.py
class RerankerProvider (line 6) | class RerankerProvider(str, enum.Enum):
class RerankerProviderOption (line 16) | class RerankerProviderOption(BaseModel):
FILE: backend/app/rag/rerankers/resolver.py
function resolve_reranker_by_id (line 18) | def resolve_reranker_by_id(
function resolve_reranker (line 31) | def resolve_reranker(
function get_default_reranker_model (line 92) | def get_default_reranker_model(
function must_get_default_reranker_model (line 108) | def must_get_default_reranker_model(session: Session) -> BaseNodePostpro...
FILE: backend/app/rag/rerankers/vllm/vllm_reranker.py
class VLLMRerank (line 17) | class VLLMRerank(BaseNodePostprocessor):
method __init__ (line 25) | def __init__(
method class_name (line 37) | def class_name(cls) -> str:
method _postprocess_nodes (line 40) | def _postprocess_nodes(
FILE: backend/app/rag/retrievers/chunk/fusion_retriever.py
class ChunkFusionRetriever (line 21) | class ChunkFusionRetriever(MultiKBFusionRetriever, ChunkRetriever):
method __init__ (line 22) | def __init__(
method _fusion (line 54) | def _fusion(
method _simple_fusion (line 59) | def _simple_fusion(
method retrieve_chunks (line 78) | def retrieve_chunks(
FILE: backend/app/rag/retrievers/chunk/helpers.py
function map_nodes_to_chunks (line 8) | def map_nodes_to_chunks(nodes_with_score: List[NodeWithScore]) -> List[R...
FILE: backend/app/rag/retrievers/chunk/schema.py
class RerankerConfig (line 9) | class RerankerConfig(BaseModel):
class MetadataFilterConfig (line 15) | class MetadataFilterConfig(BaseModel):
class VectorSearchRetrieverConfig (line 20) | class VectorSearchRetrieverConfig(BaseModel):
class KBChunkRetrievalConfig (line 28) | class KBChunkRetrievalConfig(BaseModel):
class ChunkRetrievalConfig (line 32) | class ChunkRetrievalConfig(BaseModel):
class RetrievedChunkDocument (line 39) | class RetrievedChunkDocument(BaseModel):
class RetrievedChunk (line 45) | class RetrievedChunk(BaseModel):
class ChunksRetrievalResult (line 53) | class ChunksRetrievalResult(BaseModel):
class ChunkRetriever (line 58) | class ChunkRetriever(ABC):
method retrieve_chunks (line 59) | def retrieve_chunks(
FILE: backend/app/rag/retrievers/chunk/simple_retriever.py
class ChunkSimpleRetriever (line 34) | class ChunkSimpleRetriever(BaseRetriever, ChunkRetriever):
method __init__ (line 37) | def __init__(
method _retrieve (line 83) | def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
method _build_node_list_from_query_result (line 105) | def _build_node_list_from_query_result(
method retrieve_chunks (line 118) | def retrieve_chunks(
FILE: backend/app/rag/retrievers/knowledge_graph/fusion_retriever.py
class KnowledgeGraphFusionRetriever (line 27) | class KnowledgeGraphFusionRetriever(MultiKBFusionRetriever, KnowledgeGra...
method __init__ (line 30) | def __init__(
method retrieve_knowledge_graph (line 66) | def retrieve_knowledge_graph(
method _fusion (line 92) | def _fusion(
method _knowledge_graph_fusion (line 97) | def _knowledge_graph_fusion(
FILE: backend/app/rag/retrievers/knowledge_graph/schema.py
class MetadataFilterConfig (line 17) | class MetadataFilterConfig(BaseModel):
class KnowledgeGraphRetrieverConfig (line 22) | class KnowledgeGraphRetrieverConfig(BaseModel):
class StoredKnowledgeGraphVersion (line 32) | class StoredKnowledgeGraphVersion(int, Enum):
class StoredSubGraph (line 36) | class StoredSubGraph(BaseModel):
class StoredKnowledgeGraph (line 43) | class StoredKnowledgeGraph(StoredSubGraph):
class RetrievedEntity (line 58) | class RetrievedEntity(BaseModel):
method global_id (line 73) | def global_id(self) -> str:
method __hash__ (line 76) | def __hash__(self):
class RetrievedRelationship (line 80) | class RetrievedRelationship(BaseModel):
method global_id (line 98) | def global_id(self) -> str:
method __hash__ (line 101) | def __hash__(self):
class RetrievedSubGraph (line 105) | class RetrievedSubGraph(BaseModel):
class RetrievedKnowledgeGraph (line 122) | class RetrievedKnowledgeGraph(RetrievedSubGraph):
method to_subqueries_dict (line 137) | def to_subqueries_dict(self) -> dict:
method to_stored_graph_dict (line 159) | def to_stored_graph_dict(self) -> dict:
method to_stored_graph (line 163) | def to_stored_graph(self) -> StoredKnowledgeGraph:
class KnowledgeGraphRetriever (line 179) | class KnowledgeGraphRetriever(ABC):
method retrieve_knowledge_graph (line 180) | def retrieve_knowledge_graph(self, query_str: str) -> KnowledgeGraphRe...
class KnowledgeGraphNode (line 211) | class KnowledgeGraphNode(BaseNode):
method __init__ (line 212) | def __init__(self, *args: Any, **kwargs: Any) -> None:
method get_type (line 254) | def get_type(cls) -> str:
method get_content (line 257) | def get_content(self, metadata_mode: MetadataMode = MetadataMode.ALL) ...
method _get_entities_str (line 272) | def _get_entities_str(self) -> str:
method _get_relationships_str (line 282) | def _get_relationships_str(self) -> str:
method _get_knowledge_graph_str (line 295) | def _get_knowledge_graph_str(self) -> str:
method set_content (line 302) | def set_content(self, kg: RetrievedKnowledgeGraph):
method hash (line 321) | def hash(self) -> str:
FILE: backend/app/rag/retrievers/knowledge_graph/simple_retriever.py
class KnowledgeGraphSimpleRetriever (line 23) | class KnowledgeGraphSimpleRetriever(BaseRetriever, KnowledgeGraphRetriev...
method __init__ (line 24) | def __init__(
method _retrieve (line 55) | def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
method retrieve_knowledge_graph (line 80) | def retrieve_knowledge_graph(
FILE: backend/app/rag/retrievers/multiple_knowledge_base.py
class FusionRetrievalBaseConfig (line 22) | class FusionRetrievalBaseConfig(BaseModel):
class MultiKBFusionRetriever (line 28) | class MultiKBFusionRetriever(BaseRetriever):
method __init__ (line 29) | def __init__(
method _retrieve (line 54) | def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
method _gen_sub_queries (line 77) | def _gen_sub_queries(self, query_bundle: QueryBundle) -> List[QueryBun...
method _fusion (line 82) | def _fusion(
FILE: backend/app/rag/semantic_cache/base.py
class SemanticItem (line 16) | class SemanticItem(BaseModel):
class SemanticCandidate (line 23) | class SemanticCandidate(BaseModel):
class SemanticGroup (line 29) | class SemanticGroup(BaseModel):
class QASemanticOutput (line 35) | class QASemanticOutput(BaseModel):
class QASemanticSearchModule (line 55) | class QASemanticSearchModule(dspy.Signature):
class SemanticSearchProgram (line 80) | class SemanticSearchProgram(dspy.Module):
method __init__ (line 81) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 86) | def forward(self, query: str, candidats: SemanticGroup):
class SemanticCacheManager (line 91) | class SemanticCacheManager:
method __init__ (line 92) | def __init__(
method get_query_embedding (line 109) | def get_query_embedding(self, query: str):
method add_cache (line 112) | def add_cache(
method search (line 133) | def search(
FILE: backend/app/rag/types.py
class MyCBEventType (line 6) | class MyCBEventType(str, enum.Enum):
class ChatEventType (line 32) | class ChatEventType(int, enum.Enum):
class ChatMessageSate (line 41) | class ChatMessageSate(int, enum.Enum):
FILE: backend/app/rag/utils.py
function _parse_response_format (line 5) | def _parse_response_format(response_format_str: str) -> Dict[str, str]:
function parse_goal_response_format (line 26) | def parse_goal_response_format(goal: str) -> Tuple[str, Dict[str, str]]:
FILE: backend/app/repositories/base_repo.py
class BaseRepo (line 4) | class BaseRepo:
method get (line 7) | def get(self, session: Session, id: int):
method get_all (line 10) | def get_all(self, session: Session):
method create (line 13) | def create(self, session: Session, obj: SQLModel):
FILE: backend/app/repositories/chat.py
class ChatRepo (line 16) | class ChatRepo(BaseRepo):
method paginate (line 19) | def paginate(
method get (line 55) | def get(
method must_get (line 64) | def must_get(
method update (line 74) | def update(
method delete (line 88) | def delete(self, session: Session, chat: Chat):
method get_last_message (line 93) | def get_last_message(self, session: Session, chat: Chat) -> Optional[C...
method get_messages (line 100) | def get_messages(
method get_message (line 111) | def get_message(
method must_get_message (line 123) | def must_get_message(
method create_message (line 133) | def create_message(
method find_recent_assistant_messages_by_goal (line 153) | def find_recent_assistant_messages_by_goal(
method find_best_answer_for_question (line 190) | def find_best_answer_for_question(
method chat_trend_by_user (line 238) | def chat_trend_by_user(
method chat_trend_by_origin (line 259) | def chat_trend_by_origin(
method list_chat_origins (line 293) | def list_chat_origins(
FILE: backend/app/repositories/chat_engine.py
class ChatEngineRepo (line 15) | class ChatEngineRepo(BaseRepo):
method get (line 18) | def get(
method must_get (line 28) | def must_get(
method paginate (line 36) | def paginate(
method get_default_engine (line 49) | def get_default_engine(self, session: Session) -> Optional[ChatEngine]:
method has_default (line 56) | def has_default(self, session: Session) -> bool:
method get_engine_by_name (line 66) | def get_engine_by_name(self, session: Session, name: str) -> Optional[...
method create (line 73) | def create(self, session: Session, obj: ChatEngine):
method update (line 85) | def update(
method delete (line 106) | def delete(self, session: Session, chat_engine: ChatEngine) -> ChatEng...
FILE: backend/app/repositories/chunk.py
class ChunkRepo (line 12) | class ChunkRepo(BaseRepo):
method __init__ (line 13) | def __init__(self, chunk_model: Type[SQLModel]):
method document_exists_chunks (line 16) | def document_exists_chunks(self, session: Session, document_id: int) -...
method get_documents_by_chunk_ids (line 24) | def get_documents_by_chunk_ids(
method get_document_chunks (line 36) | def get_document_chunks(self, session: Session, document_id: int):
method fetch_by_document_ids (line 41) | def fetch_by_document_ids(self, session: Session, document_ids: list[i...
method count (line 46) | def count(self, session: Session):
method delete_by_datasource (line 49) | def delete_by_datasource(self, session: Session, datasource_id: int):
method delete_by_document (line 58) | def delete_by_document(self, session: Session, document_id: int):
FILE: backend/app/repositories/data_source.py
class DataSourceRepo (line 14) | class DataSourceRepo(BaseRepo):
method paginate (line 17) | def paginate(
method get (line 29) | def get(
method delete (line 40) | def delete(self, session: Session, data_source: DataSource) -> None:
FILE: backend/app/repositories/document.py
class DocumentRepo (line 13) | class DocumentRepo(BaseRepo):
method paginate (line 16) | def paginate(
method must_get (line 63) | def must_get(self, session: Session, doc_id: int) -> Type[Document]:
method delete_by_datasource (line 69) | def delete_by_datasource(self, session: Session, datasource_id: int):
method fetch_by_ids (line 73) | def fetch_by_ids(self, session: Session, document_ids: list[int]) -> l...
FILE: backend/app/repositories/embedding_model.py
class EmbeddingModelRepo (line 18) | class EmbeddingModelRepo(BaseRepo):
method paginate (line 21) | def paginate(
method get (line 31) | def get(self, session: Session, model_id: int) -> Optional[EmbeddingMo...
method must_get (line 34) | def must_get(self, session: Session, model_id: int) -> Type[EmbeddingM...
method exists_any_model (line 40) | def exists_any_model(self, session: Session) -> bool:
method create (line 44) | def create(self, session: Session, create: EmbeddingModelCreate):
method update (line 68) | def update(
method delete (line 82) | def delete(self, session: Session, model: EmbeddingModel):
method get_default (line 95) | def get_default(self, session: Session) -> Type[EmbeddingModel]:
method has_default (line 99) | def has_default(self, session: Session) -> bool:
method must_get_default (line 102) | def must_get_default(self, session: Session) -> Type[EmbeddingModel]:
method _unset_default (line 108) | def _unset_default(self, session: Session):
method set_default (line 115) | def set_default(self, session: Session, model: EmbeddingModel):
FILE: backend/app/repositories/feedback.py
class FeedbackRepo (line 11) | class FeedbackRepo(BaseRepo):
method paginate (line 14) | def paginate(
method list_feedback_origins (line 52) | def list_feedback_origins(
FILE: backend/app/repositories/graph.py
class GraphRepo (line 12) | class GraphRepo:
method __init__ (line 13) | def __init__(
method count_entities (line 23) | def count_entities(self, session: Session):
method count_relationships (line 26) | def count_relationships(self, session: Session):
method delete_orphaned_entities (line 29) | def delete_orphaned_entities(self, session: Session):
method delete_data_source_relationships (line 45) | def delete_data_source_relationships(self, session: Session, datasourc...
method delete_document_relationships (line 57) | def delete_document_relationships(self, session: Session, document_id:...
function get_kb_graph_repo (line 67) | def get_kb_graph_repo(kb: KnowledgeBase) -> GraphRepo:
FILE: backend/app/repositories/knowledge_base.py
class KnowledgeBaseRepo (line 32) | class KnowledgeBaseRepo(BaseRepo):
method paginate (line 35) | def paginate(
method get (line 45) | def get(
method must_get (line 55) | def must_get(
method get_by_ids (line 63) | def get_by_ids(
method update (line 70) | def update(
method delete (line 84) | def delete(self, session: Session, knowledge_base: KnowledgeBase) -> N...
method get_index_overview (line 89) | def get_index_overview(self, session: Session, kb: KnowledgeBase) -> d...
method count_data_sources (line 116) | def count_data_sources(self, session: Session, kb: KnowledgeBase) -> int:
method count_documents (line 123) | def count_documents(self, session: Session, kb: KnowledgeBase) -> int:
method count_chunks (line 128) | def count_chunks(self, session: Session, kb: KnowledgeBase):
method count_relationships (line 132) | def count_relationships(self, session: Session, kb: KnowledgeBase):
method count_entities (line 136) | def count_entities(self, session: Session, kb: KnowledgeBase):
method count_documents_by_vector_index_status (line 140) | def count_documents_by_vector_index_status(
method count_chunks_by_kg_index_status (line 156) | def count_chunks_by_kg_index_status(
method batch_update_document_status (line 172) | def batch_update_document_status(
method set_failed_documents_status_to_pending (line 183) | def set_failed_documents_status_to_pending(
method batch_update_chunk_status (line 196) | def batch_update_chunk_status(
method set_failed_chunks_status_to_pending (line 211) | def set_failed_chunks_status_to_pending(
method list_vector_index_built_errors (line 228) | def list_vector_index_built_errors(
method list_kg_index_built_errors (line 263) | def list_kg_index_built_errors(
method get_kb_datasource (line 303) | def get_kb_datasource(
method must_get_kb_datasource (line 315) | def must_get_kb_datasource(
method add_kb_datasource (line 329) | def add_kb_datasource(
method list_kb_datasources (line 341) | def list_kb_datasources(
method remove_kb_datasource (line 355) | def remove_kb_datasource(
method list_linked_chat_engines (line 369) | def list_linked_chat_engines(
FILE: backend/app/repositories/llm.py
class LLMRepo (line 17) | class LLMRepo(BaseRepo):
method paginate (line 20) | def paginate(self, session: Session, params: Params = Depends()) -> Pa...
method get (line 26) | def get(self, session: Session, llm_id: int) -> Optional[LLM]:
method must_get (line 29) | def must_get(self, session: Session, llm_id: int) -> LLM:
method exists_any_model (line 35) | def exists_any_model(self, session: Session) -> bool:
method create (line 39) | def create(self, session: Session, llm: LLM) -> LLM:
method update (line 55) | def update(self, session: Session, llm: LLM, llm_update: LLMUpdate) ->...
method get_default (line 66) | def get_default(self, session: Session) -> Type[LLM] | None:
method has_default (line 75) | def has_default(self, session: Session) -> bool:
method must_get_default (line 78) | def must_get_default(self, session: Session) -> Type[LLM]:
method _unset_default (line 84) | def _unset_default(self, session: Session):
method set_default (line 87) | def set_default(self, session: Session, llm: LLM) -> LLM:
method delete (line 95) | def delete(self, session: Session, llm: LLM):
FILE: backend/app/repositories/reranker_model.py
class RerankerModelRepo (line 16) | class RerankerModelRepo(BaseRepo):
method paginate (line 19) | def paginate(
method get (line 29) | def get(self, session: Session, model_id: int) -> Optional[RerankerMod...
method must_get (line 32) | def must_get(self, session: Session, model_id: int) -> RerankerModel:
method exists_any_model (line 38) | def exists_any_model(self, session: Session) -> bool:
method create (line 42) | def create(self, session: Session, reranker_model: RerankerModel) -> R...
method update (line 58) | def update(
method delete (line 72) | def delete(self, db_session: Session, reranker_model: RerankerModel):
method get_default (line 85) | def get_default(self, session: Session) -> Optional[RerankerModel]:
method has_default (line 89) | def has_default(self, session: Session) -> bool:
method must_get_default (line 92) | def must_get_default(self, session: Session) -> RerankerModel:
method unset_default (line 98) | def unset_default(self, session: Session):
method set_default (line 101) | def set_default(self, session: Session, model: RerankerModel):
FILE: backend/app/repositories/staff_action_log.py
class StaffActionRepo (line 7) | class StaffActionRepo(BaseRepo):
method create_staff_action_log (line 10) | def create_staff_action_log(
FILE: backend/app/repositories/user.py
class UserRepo (line 9) | class UserRepo(BaseRepo):
method search_users (line 12) | def search_users(
FILE: backend/app/site_settings/__init__.py
function get_settings_from_db (line 14) | def get_settings_from_db(session: Session):
function get_db_last_updated_at (line 21) | def get_db_last_updated_at(session: Session):
class SiteSettingProxy (line 42) | class SiteSettingProxy:
method update_db_cache (line 48) | def update_db_cache(self, force_check=False):
method get_db_cache (line 66) | def get_db_cache(self) -> dict:
method __getattr__ (line 71) | def __getattr__(self, name: str) -> SettingType:
method get_setting (line 74) | def get_setting(self, name: str) -> SettingType:
method get_all_settings (line 83) | def get_all_settings(
method get_client_settings (line 103) | def get_client_settings(self) -> dict:
method setting_exists (line 109) | def setting_exists(self, name: str) -> bool:
method update_setting (line 112) | def update_setting(self, session: Session, name: str, value: SettingTy...
FILE: backend/app/site_settings/default.py
class DefaultSettings (line 11) | class DefaultSettings:
method __init__ (line 17) | def __init__(self):
method load_default_from_yaml (line 23) | def load_default_from_yaml(self):
FILE: backend/app/site_settings/types.py
class SettingValue (line 9) | class SettingValue:
FILE: backend/app/staff_action/__init__.py
function create_staff_action_log (line 6) | def create_staff_action_log(
FILE: backend/app/tasks/build_index.py
function build_index_for_document (line 27) | def build_index_for_document(self, knowledge_base_id: int, document_id: ...
function build_kg_index_for_chunk (line 96) | def build_kg_index_for_chunk(knowledge_base_id: int, chunk_id: UUID):
FILE: backend/app/tasks/evaluate.py
function add_evaluation_task (line 42) | def add_evaluation_task(evaluation_task_id: int):
function add_evaluation_task_item (line 68) | def add_evaluation_task_item(evaluation_task_item_id: int):
function evaluate_task (line 114) | def evaluate_task(evaluation_task_item: EvaluationTaskItem):
function generate_answer_by_autoflow (line 184) | def generate_answer_by_autoflow(
function parse_langfuse_trace_id_from_url (line 214) | def parse_langfuse_trace_id_from_url(trace_url: str) -> str:
FILE: backend/app/tasks/knowledge_base.py
function import_documents_for_knowledge_base (line 30) | def import_documents_for_knowledge_base(kb_id: int):
function import_documents_from_kb_datasource (line 48) | def import_documents_from_kb_datasource(kb_id: int, data_source_id: int):
function stats_for_knowledge_base (line 86) | def stats_for_knowledge_base(kb_id: int):
function purge_knowledge_base_related_resources (line 108) | def purge_knowledge_base_related_resources(kb_id: int):
function purge_kb_datasource_related_resources (line 173) | def purge_kb_datasource_related_resources(kb_id: int, datasource_id: int):
FILE: backend/app/types.py
class MimeTypes (line 4) | class MimeTypes(str, enum.Enum):
FILE: backend/app/utils/aes.py
class AESCipher (line 7) | class AESCipher:
method __init__ (line 8) | def __init__(self, key: bytes) -> None:
method encrypt (line 12) | def encrypt(self, plain_text: str) -> bytes:
method decrypt (line 24) | def decrypt(self, encrypted_text: bytes) -> str:
FILE: backend/app/utils/namespace.py
function format_namespace (line 4) | def format_namespace(namespace: Optional[str] = None) -> str:
FILE: backend/app/utils/singleflight_cache.py
function singleflight_cache (line 5) | def singleflight_cache(func):
FILE: backend/app/utils/tracing.py
class LangfuseContextManager (line 8) | class LangfuseContextManager:
method __init__ (line 11) | def __init__(self, instrumentor: LlamaIndexInstrumentor):
method observe (line 15) | def observe(self, **kwargs):
method span (line 29) | def span(
method trace_id (line 59) | def trace_id(self) -> Optional[str]:
method trace_url (line 66) | def trace_url(self) -> Optional[str]:
FILE: backend/app/utils/uuid6.py
class UUID (line 16) | class UUID(uuid.UUID):
method __init__ (line 19) | def __init__(
method subsec (line 56) | def subsec(self) -> int:
method time (line 60) | def time(self) -> int:
function _subsec_decode (line 72) | def _subsec_decode(value: int) -> int:
function _subsec_encode (line 76) | def _subsec_encode(value: int) -> int:
function uuid6 (line 84) | def uuid6(clock_seq: int = None) -> UUID:
function uuid7 (line 113) | def uuid7() -> UUID:
FILE: backend/bootstrap.py
function ensure_admin_user (line 12) | async def ensure_admin_user(
function reset_admin_password (line 40) | async def reset_admin_password(
function ensure_default_chat_engine (line 63) | async def ensure_default_chat_engine(session: AsyncSession) -> None:
function bootstrap (line 80) | async def bootstrap(
function main (line 98) | def main(email: str | None, password: str | None, reset_password: bool):
FILE: backend/dspy_program.py
function save_decompose_query_program (line 7) | def save_decompose_query_program():
FILE: backend/local_embedding_reranker/main.py
function healthz (line 33) | def healthz():
function get_embedding_model (line 41) | def get_embedding_model(model_name: str) -> SentenceTransformer:
function get_reranker_model (line 53) | def get_reranker_model(model_name: str) -> CrossEncoder:
class EmbeddingRequest (line 66) | class EmbeddingRequest(BaseModel):
class EmbeddingResponse (line 72) | class EmbeddingResponse(BaseModel):
function get_texts_embedding (line 78) | def get_texts_embedding(request: EmbeddingRequest) -> EmbeddingResponse:
class RerankerRequest (line 90) | class RerankerRequest(BaseModel):
class RerankerResponse (line 96) | class RerankerResponse(BaseModel):
function reranker_texts (line 102) | def reranker_texts(request: RerankerRequest) -> RerankerResponse:
function lifespan (line 110) | async def lifespan(app: FastAPI):
FILE: backend/main.py
function custom_generate_unique_id (line 62) | def custom_generate_unique_id(route: APIRoute) -> str:
function lifespan (line 76) | async def lifespan(app: FastAPI):
function identify_browser (line 104) | async def identify_browser(request: Request, call_next):
function cli (line 124) | def cli():
function runserver (line 132) | def runserver(host, port, workers):
function runeval (line 161) | def runeval(dataset, llm_provider, run_name, tidb_ai_chat_engine):
function runeval_dataset (line 191) | def runeval_dataset(csv, llm_provider, run_name, tidb_ai_chat_engine, ru...
function generate_answer_by_tidb_ai (line 205) | def generate_answer_by_tidb_ai(query: str):
FILE: backend/tests/conftest.py
function env (line 6) | def env():
FILE: backend/tests/test_dynamic_models.py
function dynamic_model_creation (line 7) | def dynamic_model_creation(dim, ns):
function test_concurrent_dynamic_model_creation (line 14) | def test_concurrent_dynamic_model_creation():
FILE: backend/tests/test_llms.py
function check_llm_answer_simple_question (line 31) | def check_llm_answer_simple_question(llm: BaseLLM):
function check_dspy_lm_decompose_question (line 40) | def check_dspy_lm_decompose_question(lm: dspy.LM):
function check_dspy_lm_extract_graph (line 50) | def check_dspy_lm_extract_graph(lm: dspy.LM):
function test_openai (line 66) | def test_openai():
function test_ollama (line 83) | def test_ollama():
function test_gitee_ai (line 95) | def test_gitee_ai():
function test_bedrock (line 108) | def test_bedrock():
function test_vertex_ai (line 125) | def test_vertex_ai():
function test_gemini (line 139) | def test_gemini():
function test_azure_ai (line 152) | def test_azure_ai():
FILE: core/autoflow/chunkers/base.py
class Chunker (line 7) | class Chunker(BaseComponent):
method chunk (line 9) | def chunk(self, document: Document) -> Document:
FILE: core/autoflow/chunkers/helper.py
function get_chunker_for_datatype (line 5) | def get_chunker_for_datatype(datatype: DataType) -> Chunker:
FILE: core/autoflow/chunkers/text.py
class TextChunker (line 8) | class TextChunker(Chunker):
method __init__ (line 11) | def __init__(self, config: Optional[TextChunkerConfig] = TextChunkerCo...
method chunk (line 20) | def chunk(self, document: Document) -> Document:
FILE: core/autoflow/configs/chunkers/base.py
class ChunkerConfig (line 6) | class ChunkerConfig(BaseModel):
method validate_and_create_config (line 21) | def validate_and_create_config(self) -> "ChunkerConfig":
FILE: core/autoflow/configs/chunkers/text.py
class TextChunkerConfig (line 8) | class TextChunkerConfig(BaseModel):
FILE: core/autoflow/configs/db.py
class DatabaseConfig (line 6) | class DatabaseConfig(BaseModel):
FILE: core/autoflow/configs/knowledge_base.py
class IndexMethod (line 12) | class IndexMethod(str, Enum):
class Version (line 23) | class Version(int, Enum):
class KnowledgeBaseConfig (line 27) | class KnowledgeBaseConfig(BaseModel):
FILE: core/autoflow/configs/main.py
class Config (line 6) | class Config(BaseModel):
FILE: core/autoflow/configs/models/embeddings/base.py
class EmbeddingModelConfig (line 8) | class EmbeddingModelConfig(BaseModel):
method validate_and_create_config (line 24) | def validate_and_create_config(self) -> "EmbeddingModelConfig":
FILE: core/autoflow/configs/models/embeddings/common.py
class BaseEmbeddingConfig (line 6) | class BaseEmbeddingConfig(BaseModel):
FILE: core/autoflow/configs/models/embeddings/jina_ai.py
class JinaAIEmbeddingConfig (line 4) | class JinaAIEmbeddingConfig(BaseEmbeddingConfig):
FILE: core/autoflow/configs/models/embeddings/openai.py
class OpenAIEmbeddingConfig (line 4) | class OpenAIEmbeddingConfig(BaseEmbeddingConfig):
FILE: core/autoflow/configs/models/llms/base.py
class LLMConfig (line 10) | class LLMConfig(BaseModel):
method validate_and_create_config (line 25) | def validate_and_create_config(self) -> "LLMConfig":
FILE: core/autoflow/configs/models/llms/common.py
class BaseLLMConfig (line 6) | class BaseLLMConfig(BaseModel):
FILE: core/autoflow/configs/models/llms/openai.py
class OpenAILLMConfig (line 4) | class OpenAILLMConfig(BaseLLMConfig):
FILE: core/autoflow/configs/models/manager.py
class ManagerConfig (line 8) | class ManagerConfig(BaseModel):
FILE: core/autoflow/configs/models/providers/base.py
class ModelProviderInfo (line 11) | class ModelProviderInfo(BaseModel):
class ProviderConfig (line 32) | class ProviderConfig(BaseModel):
FILE: core/autoflow/configs/models/providers/jinaai.py
class JinaAIConfig (line 4) | class JinaAIConfig(ProviderConfig):
FILE: core/autoflow/configs/models/providers/openai.py
class OpenAIConfig (line 4) | class OpenAIConfig(ProviderConfig):
FILE: core/autoflow/configs/models/rerankers/base.py
class RerankerConfig (line 7) | class RerankerConfig(BaseModel):
method validate_and_create_config (line 22) | def validate_and_create_config(self) -> "RerankerConfig":
FILE: core/autoflow/configs/models/rerankers/common.py
class BaseRerankerConfig (line 4) | class BaseRerankerConfig(BaseModel):
FILE: core/autoflow/configs/models/rerankers/jina_ai.py
class JinaAIRerankerConfig (line 4) | class JinaAIRerankerConfig(BaseRerankerConfig):
FILE: core/autoflow/data_types.py
class DataType (line 7) | class DataType(str, Enum):
function guess_datatype (line 18) | def guess_datatype(source: Union[str, IO, BinaryIO, TextIO]) -> Optional...
function guess_by_filename (line 35) | def guess_by_filename(filename: str) -> Optional[DataType]:
FILE: core/autoflow/db.py
function get_db_engine_from_config (line 8) | def get_db_engine_from_config(db_config: DatabaseConfig) -> Engine:
FILE: core/autoflow/knowledge_base/base.py
class KnowledgeBase (line 30) | class KnowledgeBase(BaseComponent):
method __init__ (line 37) | def __init__(
method _init_stores (line 63) | def _init_stores(self):
method _init_indexes (line 80) | def _init_indexes(self):
method class_name (line 88) | def class_name(self):
method documents (line 91) | def documents(self):
method knowledge_graph (line 94) | def knowledge_graph(self):
method add (line 97) | def add(
method build_index_for_document (line 124) | def build_index_for_document(
method add_document (line 159) | def add_document(self, document: Document):
method add_documents (line 162) | def add_documents(self, documents: List[Document]):
method list_documents (line 165) | def list_documents(self) -> List[Document]:
method get_document (line 168) | def get_document(self, doc_id: uuid.UUID) -> Document:
method delete_document (line 171) | def delete_document(self, doc_id: uuid.UUID) -> None:
method search (line 176) | def search(self):
method search_documents (line 180) | def search_documents(
method search_knowledge_graph (line 198) | def search_knowledge_graph(
method ask (line 214) | def ask(self, question: str) -> ChatResponse:
method reset (line 234) | def reset(self):
FILE: core/autoflow/knowledge_graph/extractors/base.py
class KGExtractor (line 7) | class KGExtractor(BaseComponent):
method __init__ (line 8) | def __init__(self, *args, **kwargs):
method extract (line 12) | def extract(self, text: str) -> GeneratedKnowledgeGraph:
FILE: core/autoflow/knowledge_graph/extractors/simple.py
class SimpleKGExtractor (line 11) | class SimpleKGExtractor(KGExtractor):
method __init__ (line 12) | def __init__(self, dspy_lm: dspy.LM):
method extract (line 18) | def extract(self, text: str) -> GeneratedKnowledgeGraph:
FILE: core/autoflow/knowledge_graph/index.py
class KnowledgeGraphIndex (line 21) | class KnowledgeGraphIndex(BaseComponent):
method __init__ (line 22) | def __init__(
method add_text (line 34) | def add_text(self, text: str) -> Optional[KnowledgeGraph]:
method add_chunk (line 38) | def add_chunk(self, chunk: Chunk) -> Optional[KnowledgeGraph]:
method retrieve (line 53) | def retrieve(
FILE: core/autoflow/knowledge_graph/programs/eval_graph.py
class EvaluateKnowledgeGraph (line 13) | class EvaluateKnowledgeGraph(dspy.Signature):
class KGEvaluationResult (line 41) | class KGEvaluationResult(BaseModel):
class KnowledgeGraphEvaluator (line 47) | class KnowledgeGraphEvaluator(dspy.Module):
method __init__ (line 48) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 53) | def forward(
FILE: core/autoflow/knowledge_graph/programs/extract_covariates.py
class InputEntity (line 14) | class InputEntity(BaseModel):
class OutputEntity (line 21) | class OutputEntity(BaseModel):
class ExtractEntityCovariate (line 33) | class ExtractEntityCovariate(dspy.Signature):
class EntityCovariateExtractor (line 56) | class EntityCovariateExtractor(dspy.Module):
method __init__ (line 57) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 62) | def forward(
FILE: core/autoflow/knowledge_graph/programs/extract_graph.py
class PredictEntity (line 17) | class PredictEntity(BaseModel):
class PredictRelationship (line 32) | class PredictRelationship(BaseModel):
class PredictKnowledgeGraph (line 49) | class PredictKnowledgeGraph(BaseModel):
method to_pandas (line 59) | def to_pandas(self):
class ExtractKnowledgeGraph (line 85) | class ExtractKnowledgeGraph(dspy.Signature):
class KnowledgeGraphExtractor (line 118) | class KnowledgeGraphExtractor(dspy.Module):
method __init__ (line 119) | def __init__(self, dspy_lm: dspy.LM):
method forward (line 124) | def forward(self, text: str) -> GeneratedKnowledgeGraph:
FILE: core/autoflow/knowledge_graph/retrievers/base.py
class KGRetriever (line 8) | class KGRetriever(ABC):
method __init__ (line 9) | def __init__(self, knowledge_graph_store: GraphStore):
method retrieve (line 13) | def retrieve(
FILE: core/autoflow/knowledge_graph/retrievers/weighted.py
class WeightedGraphRetriever (line 45) | class WeightedGraphRetriever(KGRetriever):
method __init__ (line 46) | def __init__(
method retrieve (line 68) | def retrieve(
method _fill_entity (line 170) | def _fill_entity(self, relationships: List[RetrievedRelationship]):
method _weighted_search_relationships (line 182) | def _weighted_search_relationships(
method _rank_relationships (line 209) | def _rank_relationships(
method _calc_relationship_weighted_score (line 244) | def _calc_relationship_weighted_score(
method _calc_weight_score (line 257) | def _calc_weight_score(self, weight: float) -> float:
method _calc_degree_score (line 271) | def _calc_degree_score(self, in_degree: int, out_degree: int) -> float:
FILE: core/autoflow/knowledge_graph/types.py
class GeneratedEntity (line 17) | class GeneratedEntity(BaseModel):
class GeneratedRelationship (line 23) | class GeneratedRelationship(BaseModel):
class GeneratedKnowledgeGraph (line 30) | class GeneratedKnowledgeGraph(BaseModel):
method to_create (line 34) | def to_create(
class RetrievedEntity (line 66) | class RetrievedEntity(Entity):
method __hash__ (line 70) | def __hash__(self):
method __eq__ (line 73) | def __eq__(self, other: "RetrievedEntity"):
class RetrievedRelationship (line 77) | class RetrievedRelationship(Relationship):
method __hash__ (line 81) | def __hash__(self):
method __eq__ (line 84) | def __eq__(self, other: "RetrievedRelationship"):
class RetrievedKnowledgeGraph (line 88) | class RetrievedKnowledgeGraph(BaseModel):
FILE: core/autoflow/loaders/base.py
class Loader (line 8) | class Loader(BaseComponent):
method load (line 10) | def load(
class FileLoader (line 16) | class FileLoader(Loader):
method load (line 17) | def load(self, files: str | list[str], **kwargs) -> Generator[Document...
method _load_file (line 25) | def _load_file(self, file: str) -> Document:
FILE: core/autoflow/loaders/helper.py
function get_loader_for_datatype (line 5) | def get_loader_for_datatype(datatype: DataType) -> Loader:
FILE: core/autoflow/loaders/markdown.py
class MarkdownLoader (line 6) | class MarkdownLoader(FileLoader):
method _load_file (line 7) | def _load_file(self, file: str) -> Document:
FILE: core/autoflow/loaders/pdf.py
class PDFLoader (line 8) | class PDFLoader(FileLoader):
method _load_file (line 9) | def _load_file(self, file: str) -> Document:
FILE: core/autoflow/loaders/webpage.py
class WebpageLoader (line 28) | class WebpageLoader(Loader):
method __init__ (line 29) | def __init__(
method load (line 38) | def load(self, urls: str | list[str], **kwargs) -> Generator[Document,...
FILE: core/autoflow/main.py
class Autoflow (line 15) | class Autoflow:
method __init__ (line 18) | def __init__(
method from_config (line 27) | def from_config(cls, config: Config) -> "Autoflow":
method _init_db_engine (line 33) | def _init_db_engine(cls, db_config: DatabaseConfig) -> Engine:
method db_engine (line 41) | def db_engine(self) -> Engine:
method llm_manager (line 45) | def llm_manager(self) -> "ModelManager":
method create_knowledge_base (line 48) | def create_knowledge_base(
FILE: core/autoflow/models/embedding_models/litellm.py
function get_embeddings (line 6) | def get_embeddings(
class LiteLLMEmbedding (line 41) | class LiteLLMEmbedding(BaseEmbedding):
method __init__ (line 60) | def __init__(
method class_name (line 68) | def class_name(cls) -> str:
method _aget_query_embedding (line 71) | async def _aget_query_embedding(self, query: str) -> List[float]:
method _aget_text_embedding (line 74) | async def _aget_text_embedding(self, text: str) -> List[float]:
method _get_query_embedding (line 77) | def _get_query_embedding(self, query: str) -> List[float]:
method _get_text_embedding (line 88) | def _get_text_embedding(self, text: str) -> List[float]:
method _get_text_embeddings (line 99) | def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
FILE: core/autoflow/models/llms/dspy.py
function get_dspy_lm_by_llm (line 6) | def get_dspy_lm_by_llm(llm: LLM) -> dspy.LM:
FILE: core/autoflow/models/manager.py
class ModelManager (line 18) | class ModelManager:
method load_from_db (line 22) | def load_from_db(cls):
method from_config (line 26) | def from_config(cls):
method registry_provider (line 29) | def registry_provider(self, name: ModelProviders, config: ProviderConf...
method get_provider_config (line 32) | def get_provider_config(self, name: ModelProviders) -> Optional[Provid...
method resolve_llm (line 38) | def resolve_llm(
method resolve_embedding_model (line 57) | def resolve_embedding_model(
method resolve_rerank_model (line 77) | def resolve_rerank_model(
FILE: core/autoflow/models/provider.py
class ProviderRegistry (line 4) | class ProviderRegistry(ABC):
method register (line 5) | def register(self, name: str):
method get_provider_credentials (line 8) | def get_provider_credentials(self):
FILE: core/autoflow/models/rerank_models/litellm.py
class LiteLLMReranker (line 17) | class LiteLLMReranker(BaseNodePostprocessor):
method __init__ (line 23) | def __init__(
method class_name (line 33) | def class_name(cls) -> str:
method _postprocess_nodes (line 36) | def _postprocess_nodes(
FILE: core/autoflow/orms/base.py
class UUIDBaseModel (line 12) | class UUIDBaseModel(TableModel):
FILE: core/autoflow/storage/doc_store/base.py
class DocumentStore (line 8) | class DocumentStore(ABC):
method add (line 10) | def add(self, documents: List[Document]) -> List[Document]:
method update (line 14) | def update(self, document_id: UUID, update: Dict[str, Any]):
method delete (line 18) | def delete(self, document_id: UUID) -> None:
method list (line 22) | def list(self, filters: Dict[str, Any] = None) -> List[Document]:
method search (line 26) | def search(
method get (line 35) | def get(self, document_id: UUID) -> Document:
method add_doc_chunks (line 39) | def add_doc_chunks(self, document_id: UUID, chunks: List[Chunk]) -> Li...
method list_doc_chunks (line 43) | def list_doc_chunks(self, document_id: UUID) -> List[Chunk]:
method get_chunk (line 47) | def get_chunk(self, chunk_id: UUID) -> Chunk:
method update_chunk (line 51) | def update_chunk(self, chunk_id: UUID, update: Dict[str, Any]) -> Chunk:
method delete_chunk (line 55) | def delete_chunk(self, chunk_id: UUID) -> None:
FILE: core/autoflow/storage/doc_store/tidb_doc_store.py
function dynamic_create_models (line 31) | def dynamic_create_models(
class TiDBDocumentStore (line 103) | class TiDBDocumentStore(DocumentStore):
method __init__ (line 110) | def __init__(
method class_name (line 124) | def class_name(cls) -> str:
method _init_store (line 127) | def _init_store(
method add (line 140) | def add(self, documents: List[Document]) -> List[Document]:
method update (line 163) | def update(self, document_id: UUID, update: Dict[str, Any]) -> None:
method delete (line 169) | def delete(self, document_id: UUID) -> None:
method get (line 180) | def get(self, document_id: UUID) -> Document:
method list (line 188) | def list(self, filters: Dict[str, Any] = None) -> List[Document]:
method search (line 195) | def search(
method _convert_to_retrieval_result (line 228) | def _convert_to_retrieval_result(
method add_doc_chunks (line 253) | def add_doc_chunks(self, document_id: UUID, chunks: List[Chunk]) -> Li...
method list_doc_chunks (line 266) | def list_doc_chunks(self, document_id: UUID) -> List[Chunk]:
method get_chunk (line 272) | def get_chunk(self, chunk_id: UUID) -> Chunk:
method delete_chunk (line 279) | def delete_chunk(self, chunk_id: UUID) -> None:
method update_chunk (line 285) | def update_chunk(self, chunk_id: UUID, update: Dict[str, Any]) -> Chunk:
method recreate (line 294) | def recreate(self) -> None:
method reset (line 300) | def reset(self) -> None:
FILE: core/autoflow/storage/doc_store/types.py
class Chunk (line 15) | class Chunk(BaseModel):
method hash (line 32) | def hash(self) -> Optional[str]:
class RetrievedChunk (line 36) | class RetrievedChunk(Chunk):
class Document (line 46) | class Document(BaseModel):
method hash (line 64) | def hash(self) -> Optional[str]:
class DocumentDescriptor (line 68) | class DocumentDescriptor(BaseModel):
class DocumentSearchResult (line 74) | class DocumentSearchResult(BaseModel):
FILE: core/autoflow/storage/graph_store/base.py
class GraphStore (line 32) | class GraphStore(BaseComponent, ABC):
method list_entities (line 37) | def list_entities(
method search_entities (line 43) | def search_entities(
method get_entity (line 53) | def get_entity(self, entity_id: UUID) -> Entity:
method must_get_entity (line 57) | def must_get_entity(self, entity_id: UUID) -> Entity:
method create_entity (line 63) | def create_entity(
method update_entity (line 74) | def update_entity(self, entity: Entity | UUID, update: EntityUpdate) -...
method delete_entity (line 78) | def delete_entity(self, entity_id: UUID) -> None:
method delete_orphan_entities (line 82) | def delete_orphan_entities(self):
method calc_entity_out_degree (line 88) | def calc_entity_out_degree(self, entity_id: UUID) -> int:
method calc_entity_in_degree (line 92) | def calc_entity_in_degree(self, entity_id: UUID) -> int:
method calc_entity_degree (line 96) | def calc_entity_degree(self, entity_id: UUID) -> int:
method calc_entities_degrees (line 100) | def calc_entities_degrees(
method get_relationship (line 108) | def get_relationship(self, relationship_id: UUID) -> Relationship:
method list_relationships (line 112) | def list_relationships(self, filters: RelationshipFilters) -> List[Rel...
method create_relationship (line 116) | def create_relationship(
method update_relationship (line 127) | def update_relationship(
method delete_relationship (line 133) | def delete_relationship(self, relationship_id: UUID):
method search_relationships (line 137) | def search_relationships(
method reset (line 158) | def reset(self):
method drop (line 162) | def drop(self):
method add (line 168) | def add(self, knowledge_graph: KnowledgeGraphCreate) -> Optional[Knowl...
FILE: core/autoflow/storage/graph_store/tidb_graph_store.py
function dynamic_create_models (line 40) | def dynamic_create_models(
class TiDBGraphStore (line 152) | class TiDBGraphStore(GraphStore):
method __init__ (line 159) | def __init__(
method _init_store (line 174) | def _init_store(
method get_entity (line 189) | def get_entity(self, entity_id: UUID) -> Entity:
method list_entities (line 192) | def list_entities(
method search_entities (line 200) | def search_entities(
method _convert_entity_filters (line 219) | def _convert_entity_filters(self, filters: Optional[EntityFilters]) ->...
method create_entity (line 230) | def create_entity(
method _get_entity_embedding (line 249) | def _get_entity_embedding(self, name: str, description: str) -> list[f...
method find_or_create_entity (line 253) | def find_or_create_entity(
method update_entity (line 277) | def update_entity(self, entity: Entity | UUID, update: EntityUpdate) -...
method delete_entity (line 302) | def delete_entity(self, entity_id: UUID) -> None:
method delete_orphan_entities (line 311) | def delete_orphan_entities(self):
method calc_entity_out_degree (line 316) | def calc_entity_out_degree(self, entity_id: UUID) -> int:
method calc_entity_in_degree (line 322) | def calc_entity_in_degree(self, entity_id: UUID) -> int:
method calc_entity_degree (line 328) | def calc_entity_degree(self, entity_id: UUID) -> int:
method calc_entities_degrees (line 337) | def calc_entities_degrees(
method get_relationship (line 367) | def get_relationship(self, relationship_id: UUID) -> Relationship:
method list_relationships (line 370) | def list_relationships(
method search_relationships (line 378) | def search_relationships(
method _convert_relationship_filters (line 409) | def _convert_relationship_filters(self, filters: RelationshipFilters) ...
method create_relationship (line 481) | def create_relationship(
method _get_relationship_embedding (line 515) | def _get_relationship_embedding(
method update_relationship (line 529) | def update_relationship(
method delete_relationship (line 552) | def delete_relationship(self, relationship_id: UUID):
method add (line 557) | def add(self, knowledge_graph: KnowledgeGraphCreate) -> Optional[Knowl...
method reset (line 607) | def reset(self):
method recreate (line 614) | def recreate(self):
method drop (line 622) | def drop(self):
FILE: core/autoflow/storage/graph_store/types.py
class EntityType (line 12) | class EntityType(str, Enum):
method __str__ (line 16) | def __str__(self):
class Entity (line 20) | class Entity(BaseModel):
class EntityCreate (line 37) | class EntityCreate(BaseModel):
class SynopsisEntityCreate (line 44) | class SynopsisEntityCreate(EntityCreate):
method validate_entities (line 49) | def validate_entities(self):
class EntityFilters (line 55) | class EntityFilters(BaseModel):
class EntityUpdate (line 60) | class EntityUpdate(BaseModel):
class EntityDegree (line 67) | class EntityDegree(BaseModel):
class Relationship (line 76) | class Relationship(BaseModel):
class RelationshipCreate (line 94) | class RelationshipCreate(BaseModel):
class RelationshipUpdate (line 104) | class RelationshipUpdate(BaseModel):
class RelationshipFilters (line 109) | class RelationshipFilters(BaseModel):
class KnowledgeGraph (line 147) | class KnowledgeGraph(BaseModel):
class KnowledgeGraphCreate (line 155) | class KnowledgeGraphCreate(BaseModel):
FILE: core/autoflow/storage/types.py
class QueryBundle (line 6) | class QueryBundle(BaseModel):
FILE: core/autoflow/utils/hash.py
function sha256 (line 5) | def sha256(text: Optional[str]) -> Optional[str]:
FILE: core/autoflow/utils/uuid6.py
class UUID (line 17) | class UUID(uuid.UUID):
method __init__ (line 20) | def __init__(
method subsec (line 57) | def subsec(self) -> int:
method time (line 61) | def time(self) -> int:
function _subsec_decode (line 73) | def _subsec_decode(value: int) -> int:
function _subsec_encode (line 77) | def _subsec_encode(value: int) -> int:
function uuid6 (line 85) | def uuid6(clock_seq: int = None) -> UUID:
function uuid7 (line 114) | def uuid7() -> UUID:
FILE: core/autoflow/utils/vector.py
function cosine_distance (line 1) | def cosine_distance(v1, v2):
FILE: core/examples/streamlit/build-knowledge-search-with-autoflow-and-streamlit.py
function on_submit (line 104) | def on_submit():
FILE: core/tests/conftest.py
function env (line 18) | def env():
function db_engine (line 24) | def db_engine():
function llm (line 37) | def llm():
function embedding_model (line 42) | def embedding_model():
function tidb_client (line 47) | def tidb_client(db_engine):
FILE: core/tests/knowledge_base/test_kb_with_namespace.py
function kb (line 12) | def kb(db_engine, llm, embedding_model):
function test_add_documents_via_filepath (line 28) | def test_add_documents_via_filepath(kb: KnowledgeBase):
function test_add_documents_via_url (line 33) | def test_add_documents_via_url(kb: KnowledgeBase):
function test_search_documents (line 38) | def test_search_documents(kb: KnowledgeBase):
function test_search_knowledge_graph (line 46) | def test_search_knowledge_graph(kb: KnowledgeBase):
FILE: core/tests/knowledge_base/test_kb_without_namespace.py
function kb (line 12) | def kb(db_engine, llm, embedding_model):
function test_add_documents_via_filepath (line 25) | def test_add_documents_via_filepath(kb: KnowledgeBase):
function test_add_documents_via_url (line 30) | def test_add_documents_via_url(kb):
function test_search_documents (line 35) | def test_search_documents(kb):
function test_search_knowledge_graph (line 43) | def test_search_knowledge_graph(kb):
FILE: core/tests/knowledge_graph/programs/test_extract_graph.py
function extractor (line 14) | def extractor(llm):
function evaluator (line 21) | def evaluator(llm):
function test_extract_graph (line 27) | def test_extract_graph(extractor, evaluator):
FILE: core/tests/knowledge_graph/test_kg_extractor.py
function test_kg_extractor (line 6) | def test_kg_extractor(llm):
FILE: core/tests/models/test_model_manager.py
function setup_model_manager (line 19) | def setup_model_manager():
function test_llm (line 35) | def test_llm():
function test_embedding_model (line 58) | def test_embedding_model():
function test_reranker_model (line 70) | def test_reranker_model():
FILE: core/tests/storage/doc_store/test_tidb_doc_store.py
function doc_store (line 11) | def doc_store():
function doc_store_with_auto_embed (line 17) | def doc_store_with_auto_embed():
function test_crud (line 27) | def test_crud(doc_store):
function test_crud_with_auto_embed (line 103) | def test_crud_with_auto_embed(doc_store_with_auto_embed):
FILE: core/tests/storage/graph_store/test_tidb_graph_store.py
function graph_store (line 17) | def graph_store(tidb_client, embedding_model):
function test_entity_crud (line 25) | def test_entity_crud(graph_store: TiDBGraphStore):
function test_relationship_crud (line 97) | def test_relationship_crud(graph_store: TiDBGraphStore):
function test_entity_degree (line 163) | def test_entity_degree(graph_store: TiDBGraphStore):
FILE: docs/src/app/[[...mdxPath]]/page.jsx
function generateMetadata (line 6) | async function generateMetadata(props) {
function Page (line 14) | async function Page(props) {
FILE: docs/src/app/_app.tsx
function App (line 4) | function App({ Component, pageProps }: AppProps) {
FILE: docs/src/app/layout.jsx
function RootLayout (line 23) | async function RootLayout({ children }) {
FILE: e2e/tests/api.spec.ts
function expectGetOkStep (line 27) | async function expectGetOkStep (url: string) {
function expectOk (line 89) | async function expectOk (response: APIResponse | Promise<APIResponse>) {
FILE: e2e/tests/bootstrap.ts
function clickTab (line 73) | async function clickTab (text: string, url: string) {
FILE: e2e/tests/chat-engine.spec.ts
function checkChatEngineAvailability (line 198) | async function checkChatEngineAvailability (page: Page, name: string) {
function getChatEngine (line 221) | async function getChatEngine (page: Page, id: number) {
function waitUpdate (line 227) | async function waitUpdate (page: Page, locator: Locator) {
FILE: e2e/tests/evaluation.spec.ts
function createEvaluationDataset (line 71) | async function createEvaluationDataset (page: Page, name: string, file?:...
FILE: e2e/tests/knowledge-base.spec.ts
function createFeaturedKnowledgeBase (line 118) | async function createFeaturedKnowledgeBase (page: Page, name: string, en...
function configureSimpleDataSource (line 149) | async function configureSimpleDataSource (page: Page, kbId: number, enab...
function pollKbOverviewUntill (line 174) | async function pollKbOverviewUntill (page: Page, kbId: number, isOk: (js...
FILE: e2e/tests/site-settings.spec.ts
function submitAndWaitSavedByLabel (line 36) | async function submitAndWaitSavedByLabel (label: string) {
FILE: e2e/tests/widget.spec.ts
function testWidgetChat (line 54) | async function testWidgetChat (page: Page, dialog: Locator) {
FILE: e2e/utils/chat.ts
constant QUESTION (line 3) | const QUESTION = 'What is the content of sample.pdf?';
function getChatRequestPromise (line 5) | function getChatRequestPromise (page: Page, baseURL: string) {
function testNewChat (line 9) | async function testNewChat (page: Page, chatRequest: Request, validatePa...
FILE: e2e/utils/forms.ts
function selectOption (line 3) | async function selectOption (page: Page, name: string, value: string | R...
function turnSwitch (line 14) | async function turnSwitch (page: Page, name: string, on: boolean = true) {
function checkCheckbox (line 33) | async function checkCheckbox (page: Page, name: string, on: boolean = tr...
FILE: e2e/utils/login.ts
function loginViaApi (line 3) | async function loginViaApi ({ request }: { request: APIRequestContext }) {
FILE: frontend/app/.storybook/main.ts
method webpack (line 19) | webpack (config) {
FILE: frontend/app/jest.polyfills.js
class ResizeObserver (line 35) | class ResizeObserver {
method observe (line 36) | observe() {
method disconnect (line 39) | disconnect() {
FILE: frontend/app/next.config.ts
method webpack (line 18) | webpack (config, options) {
FILE: frontend/app/src/api/api-keys.ts
type ApiKey (line 5) | interface ApiKey {
type CreateApiKey (line 15) | interface CreateApiKey {
type CreateApiKeyResponse (line 19) | interface CreateApiKeyResponse {
function listApiKeys (line 37) | async function listApiKeys ({ page = 1, size = 10 }: PageParams = {}): P...
function createApiKey (line 44) | async function createApiKey (create: CreateApiKey): Promise<CreateApiKey...
function deleteApiKey (line 55) | async function deleteApiKey (id: number): Promise<void> {
FILE: frontend/app/src/api/auth.ts
type LoginParams (line 3) | interface LoginParams {
function login (line 8) | async function login (params: LoginParams) {
function logout (line 21) | async function logout () {
FILE: frontend/app/src/api/chat-engines.ts
type ChatEngine (line 5) | interface ChatEngine {
type CreateChatEngineParams (line 19) | interface CreateChatEngineParams {
type ChatEngineOptions (line 27) | interface ChatEngineOptions {
type ChatEngineKnowledgeBaseOptions (line 41) | interface ChatEngineKnowledgeBaseOptions {
type ChatEngineKnowledgeGraphOptions (line 49) | interface ChatEngineKnowledgeGraphOptions {
type ChatEngineLLMOptions (line 57) | type ChatEngineLLMOptions = {
type LinkedKnowledgeBaseOptions (line 70) | interface LinkedKnowledgeBaseOptions {
function getDefaultChatEngineOptions (line 141) | async function getDefaultChatEngineOptions (): Promise<ChatEngineOptions> {
function listChatEngines (line 148) | async function listChatEngines ({ page = 1, size = 10 }: PageParams = {}...
function getChatEngine (line 155) | async function getChatEngine (id: number): Promise<ChatEngine> {
function updateChatEngine (line 162) | async function updateChatEngine (id: number, partial: Partial<Pick<ChatE...
function createChatEngine (line 174) | async function createChatEngine (create: CreateChatEngineParams) {
function deleteChatEngine (line 186) | async function deleteChatEngine (id: number): Promise<void> {
function listPublicChatEngines (line 196) | async function listPublicChatEngines ({ page = 1, size = 10 }: PageParam...
function getPublicChatEngine (line 203) | async function getPublicChatEngine (id: number): Promise<ChatEngine> {
FILE: frontend/app/src/api/chats.ts
type ClientEngineOptions (line 9) | type ClientEngineOptions = Omit<ChatEngineOptions, 'post_verification_to...
type Chat (line 11) | interface Chat {
type ChatDetail (line 24) | interface ChatDetail {
type ChatMessageRole (line 29) | const enum ChatMessageRole {
type ChatMessage (line 34) | interface ChatMessage {
type ChatMessageSource (line 50) | interface ChatMessageSource {
type FeedbackParams (line 96) | interface FeedbackParams {
type PostChatParams (line 101) | interface PostChatParams {
function listChats (line 110) | async function listChats ({ page = 1, size = 10 }: PageParams = {}): Pro...
function getChat (line 117) | async function getChat (id: string): Promise<ChatDetail> {
function deleteChat (line 124) | async function deleteChat (id: string): Promise<void> {
function postFeedback (line 131) | async function postFeedback (chatMessageId: number, feedback: FeedbackPa...
function getChatMessageSubgraph (line 143) | async function getChatMessageSubgraph (chatMessageId: number): Promise<K...
function getChatMessageRecommendedQuestions (line 151) | async function getChatMessageRecommendedQuestions (chatMessageId: number) {
function reloadChatMessageRecommendedQuestions (line 159) | async function reloadChatMessageRecommendedQuestions (chatMessageId: num...
FILE: frontend/app/src/api/commons.ts
function setDefault (line 3) | async function setDefault (type: 'embedding-models' | 'llms' | 'reranker...
FILE: frontend/app/src/api/datasources.ts
type DatasourceBase (line 6) | interface DatasourceBase {
type DatasourceSpec (line 11) | type DatasourceSpec = ({
type Datasource (line 22) | type Datasource = DatasourceBase & DatasourceSpec;
type DataSourceIndexProgress (line 24) | type DataSourceIndexProgress = {
type BaseCreateDatasourceParams (line 32) | interface BaseCreateDatasourceParams {
type CreateDatasourceSpecParams (line 36) | type CreateDatasourceSpecParams = ({
type CreateDatasourceParams (line 47) | type CreateDatasourceParams = BaseCreateDatasourceParams & CreateDatasou...
type Upload (line 49) | interface Upload {
type DatasourceVectorIndexError (line 60) | type DatasourceVectorIndexError = {
type DatasourceKgIndexError (line 67) | type DatasourceKgIndexError = {
function listDataSources (line 119) | async function listDataSources (kbId: number, { page = 1, size = 10 }: P...
function getDatasource (line 125) | async function getDatasource (kbId: number, id: number): Promise<Datasou...
function deleteDatasource (line 131) | async function deleteDatasource (kbId: number, id: number): Promise<void> {
function createDatasource (line 138) | async function createDatasource (kbId: number, params: CreateDatasourceP...
function updateDatasource (line 149) | async function updateDatasource (kbId: number, id: number, params: { nam...
function uploadFiles (line 160) | async function uploadFiles (files: File[]) {
FILE: frontend/app/src/api/documents.ts
type Document (line 17) | interface Document {
type ListDocumentsTableFilters (line 76) | type ListDocumentsTableFilters = z.infer<typeof listDocumentsFiltersSche...
function listDocuments (line 78) | async function listDocuments ({ page = 1, size = 10, knowledge_base_id, ...
type MimeType (line 91) | interface MimeType {
FILE: frontend/app/src/api/embedding-models.ts
type EmbeddingModelSummary (line 6) | interface EmbeddingModelSummary {
type EmbeddingModel (line 15) | interface EmbeddingModel extends EmbeddingModelSummary {
type EmbeddingModelOption (line 21) | interface EmbeddingModelOption extends ProviderOption {
type CreateEmbeddingModel (line 26) | interface CreateEmbeddingModel {
type UpdateEmbeddingModel (line 35) | interface UpdateEmbeddingModel {
function listEmbeddingModelOptions (line 61) | async function listEmbeddingModelOptions () {
function getEmbeddingModel (line 68) | async function getEmbeddingModel (id: number) {
function listEmbeddingModels (line 75) | async function listEmbeddingModels (params: PageParams) {
function createEmbeddingModel (line 82) | async function createEmbeddingModel (create: CreateEmbeddingModel) {
function updateEmbeddingModel (line 93) | async function updateEmbeddingModel (id: number, update: UpdateEmbedding...
function testEmbeddingModel (line 104) | async function testEmbeddingModel (createEmbeddingModel: CreateEmbedding...
FILE: frontend/app/src/api/evaluations.ts
type EvaluationDataset (line 5) | interface EvaluationDataset {
type EvaluationDatasetItem (line 13) | interface EvaluationDatasetItem {
type EvaluationTask (line 24) | interface EvaluationTask {
type EvaluationTaskWithSummary (line 33) | interface EvaluationTaskWithSummary extends EvaluationTask {
type EvaluationTaskSummaryMetric (line 48) | type EvaluationTaskSummaryMetric = typeof EvaluationTaskSummaryMetrics[n...
type EvaluationTaskSummary (line 50) | interface EvaluationTaskSummary extends Record<EvaluationTaskSummaryMetr...
type EvaluationTaskItemStatus (line 57) | type EvaluationTaskItemStatus = 'not_start' | 'evaluating' | 'done' | 'e...
type EvaluationTaskItem (line 59) | interface EvaluationTaskItem {
type CreateEvaluationDatasetParams (line 76) | interface CreateEvaluationDatasetParams {
type UpdateEvaluationDatasetParams (line 81) | interface UpdateEvaluationDatasetParams {
type CreateEvaluationDatasetItemParams (line 85) | interface CreateEvaluationDatasetItemParams {
type UpdateEvaluationDatasetItemParams (line 92) | interface UpdateEvaluationDatasetItemParams {
type CreateEvaluationTaskParams (line 99) | interface CreateEvaluationTaskParams {
function listEvaluationDatasets (line 172) | async function listEvaluationDatasets ({ ...params }: PageParams & { key...
function createEvaluationDataset (line 179) | async function createEvaluationDataset (params: CreateEvaluationDatasetP...
function updateEvaluationDataset (line 191) | async function updateEvaluationDataset (id: number, params: UpdateEvalua...
function deleteEvaluationDataset (line 203) | async function deleteEvaluationDataset (id: number): Promise<void> {
function listEvaluationDatasetItems (line 214) | async function listEvaluationDatasetItems (datasetId: number, { ...param...
function createEvaluationDatasetItem (line 221) | async function createEvaluationDatasetItem (datasetId: number, params: C...
function updateEvaluationDatasetItem (line 236) | async function updateEvaluationDatasetItem (datasetId: number, id: numbe...
function getEvaluationDatasetItem (line 251) | async function getEvaluationDatasetItem (datasetId: number, id: number) {
function deleteEvaluationDatasetItem (line 261) | async function deleteEvaluationDatasetItem (datasetId: number, id: numbe...
function createEvaluationTask (line 273) | async function createEvaluationTask (params: CreateEvaluationTaskParams)...
function listEvaluationTasks (line 285) | async function listEvaluationTasks ({ ...params }: PageParams & { keywor...
function getEvaluationTaskWithSummary (line 292) | async function getEvaluationTaskWithSummary (id: number): Promise<Evalua...
function cancelEvaluationTask (line 299) | async function cancelEvaluationTask (id: number): Promise<void> {
function listEvaluationTaskItems (line 307) | async function listEvaluationTaskItems (id: number, params: PageParams &...
FILE: frontend/app/src/api/feedbacks.ts
type FeedbackType (line 5) | const enum FeedbackType {
type Feedback (line 10) | interface Feedback {
function listFeedbacks (line 42) | async function listFeedbacks ({ page = 1, size = 10 }: PageParams = {}):...
FILE: frontend/app/src/api/graph.ts
type KnowledgeGraph (line 6) | interface KnowledgeGraph {
type KnowledgeGraphEntityType (line 11) | const enum KnowledgeGraphEntityType {
type KnowledgeGraphEntity (line 16) | interface KnowledgeGraphEntity {
type KnowledgeGraphRelationship (line 29) | interface KnowledgeGraphRelationship {
type UpdateEntityParams (line 68) | interface UpdateEntityParams {
type CreateSynopsisEntityParams (line 74) | interface CreateSynopsisEntityParams {
type UpdateRelationshipParams (line 82) | interface UpdateRelationshipParams {
type GraphSearchParams (line 88) | interface GraphSearchParams {
type KBRetrieveKnowledgeGraphParams (line 95) | interface KBRetrieveKnowledgeGraphParams {
type KnowledgeGraphRetrievalConfig (line 103) | interface KnowledgeGraphRetrievalConfig {
function search (line 113) | async function search (kbId: number, params: GraphSearchParams) {
function searchEntity (line 124) | async function searchEntity (kbId: number, query: string, top_k: number ...
function getEntity (line 133) | async function getEntity (kbId: number, id: number) {
function updateEntity (line 142) | async function updateEntity (kbId: number, id: number, params: UpdateEnt...
function createSynopsisEntity (line 153) | async function createSynopsisEntity (kbId: number, params: CreateSynopsi...
function getEntitySubgraph (line 164) | async function getEntitySubgraph (kbId: number, id: number) {
function getEntireKnowledgeGraph (line 173) | async function getEntireKnowledgeGraph (kbId: number, params: KBRetrieve...
function streamEntireKnowledgeGraph (line 185) | async function streamEntireKnowledgeGraph (kbId: number): Promise<Knowle...
function getRelationship (line 239) | async function getRelationship (kbId: number, id: number) {
function updateRelationship (line 248) | async function updateRelationship (kbId: number, id: number, params: Upd...
FILE: frontend/app/src/api/knowledge-base.ts
type KnowledgeBaseIndexMethod (line 10) | type KnowledgeBaseIndexMethod = 'vector' | 'knowledge_graph';
type CreateKnowledgeBaseParams (line 12) | interface CreateKnowledgeBaseParams {
type UpdateKnowledgeBaseParams (line 21) | interface UpdateKnowledgeBaseParams {
type KnowledgeBaseSummary (line 26) | interface KnowledgeBaseSummary {
type KnowledgeBase (line 40) | interface KnowledgeBase extends KnowledgeBaseSummary {
type KnowledgeGraphIndexProgress (line 47) | type KnowledgeGraphIndexProgress = {
type KnowledgeBaseSplitterType (line 56) | type KnowledgeBaseSplitterType = KnowledgeBaseChunkingSplitterRule['spli...
type KnowledgeBaseChunkingSentenceSplitterConfig (line 58) | type KnowledgeBaseChunkingSentenceSplitterConfig = {
type KnowledgeBaseChunkingMarkdownSplitterConfig (line 64) | type KnowledgeBaseChunkingMarkdownSplitterConfig = {
type KnowledgeBaseChunkingSentenceSplitterRule (line 69) | type KnowledgeBaseChunkingSentenceSplitterRule = {
type KnowledgeBaseChunkingMarkdownSplitterRule (line 74) | type KnowledgeBaseChunkingMarkdownSplitterRule = {
type KnowledgeBaseChunkingSplitterRule (line 79) | type KnowledgeBaseChunkingSplitterRule = KnowledgeBaseChunkingSentenceSp...
type KnowledgeBaseChunkingConfigGeneral (line 81) | type KnowledgeBaseChunkingConfigGeneral = {
type KnowledgeBaseChunkingConfigAdvanced (line 85) | type KnowledgeBaseChunkingConfigAdvanced = {
type KnowledgeBaseChunkingConfig (line 93) | type KnowledgeBaseChunkingConfig = KnowledgeBaseChunkingConfigGeneral | ...
type KnowledgeGraphDocumentChunk (line 95) | type KnowledgeGraphDocumentChunk = z.infer<typeof knowledgeGraphDocument...
function listKnowledgeBases (line 201) | async function listKnowledgeBases ({ page = 1, size = 10 }: PageParams) {
function getKnowledgeBaseById (line 208) | async function getKnowledgeBaseById (id: number): Promise<KnowledgeBase> {
function getKnowledgeBaseDocumentChunks (line 215) | async function getKnowledgeBaseDocumentChunks (id: number, documentId: n...
function getKnowledgeBaseDocument (line 222) | async function getKnowledgeBaseDocument (id: number, documentId: number) {
function getKnowledgeBaseLinkedChatEngines (line 229) | async function getKnowledgeBaseLinkedChatEngines (id: number) {
function deleteKnowledgeBaseDocument (line 236) | async function deleteKnowledgeBaseDocument (id: number, documentId: numb...
function rebuildKBDocumentIndex (line 244) | async function rebuildKBDocumentIndex (kb_id: number, doc_id: number) {
function createKnowledgeBase (line 252) | async function createKnowledgeBase (params: CreateKnowledgeBaseParams) {
function updateKnowledgeBase (line 263) | async function updateKnowledgeBase (id: number, params: UpdateKnowledgeB...
function getKnowledgeGraphIndexProgress (line 274) | async function getKnowledgeGraphIndexProgress (id: number): Promise<Know...
function listKnowledgeBaseVectorIndexErrors (line 280) | async function listKnowledgeBaseVectorIndexErrors (id: number, { page = ...
function listKnowledgeBaseKgIndexErrors (line 286) | async function listKnowledgeBaseKgIndexErrors (id: number, { page = 1, s...
function retryKnowledgeBaseAllFailedTasks (line 292) | async function retryKnowledgeBaseAllFailedTasks (id: number) {
function deleteKnowledgeBase (line 302) | async function deleteKnowledgeBase (id: number) {
FILE: frontend/app/src/api/llms.ts
type LLMSummary (line 6) | interface LLMSummary {
type LLM (line 14) | interface LLM extends LLMSummary {
type LlmOption (line 20) | interface LlmOption extends ProviderOption {
type CreateLLM (line 25) | interface CreateLLM {
type UpdateLLM (line 34) | interface UpdateLLM {
function listLlmOptions (line 59) | async function listLlmOptions () {
function listLlms (line 68) | async function listLlms ({ page = 1, size = 10 }: PageParams = {}): Prom...
function getLlm (line 75) | async function getLlm (id: number): Promise<LLM> {
function createLlm (line 81) | async function createLlm (create: CreateLLM) {
function updateLlm (line 92) | async function updateLlm (id: number, update: UpdateLLM) {
function deleteLlm (line 103) | async function deleteLlm (id: number) {
function testLlm (line 110) | async function testLlm (createLLM: CreateLLM) {
FILE: frontend/app/src/api/providers.ts
type ProviderOption (line 3) | interface ProviderOption {
FILE: frontend/app/src/api/rag.ts
type IndexStatus (line 11) | type IndexStatus = typeof indexStatuses[number];
type IndexProgress (line 13) | type IndexProgress = Partial<Record<IndexStatus, number>>
type IndexTotalStats (line 15) | type IndexTotalStats = {
FILE: frontend/app/src/api/rerankers.ts
type Reranker (line 6) | interface Reranker {
type RerankerOption (line 18) | interface RerankerOption extends ProviderOption {
type CreateReranker (line 24) | interface CreateReranker {
type UpdateReranker (line 34) | interface UpdateReranker {
function listRerankerOptions (line 59) | async function listRerankerOptions () {
function listRerankers (line 68) | async function listRerankers ({ page = 1, size = 10 }: PageParams = {}):...
function getReranker (line 75) | async function getReranker (id: number): Promise<Reranker> {
function createReranker (line 81) | async function createReranker (create: CreateReranker) {
function updateReranker (line 92) | async function updateReranker (id: number, update: UpdateReranker) {
function deleteReranker (line 103) | async function deleteReranker (id: number) {
function testReranker (line 110) | async function testReranker (createReranker: CreateReranker) {
FILE: frontend/app/src/api/site-settings.ts
type SettingItemBase (line 4) | interface SettingItemBase<K, T> {
type IntSettingItem (line 14) | type IntSettingItem = SettingItemBase<'int', number>
type FloatSettingItem (line 15) | type FloatSettingItem = SettingItemBase<'float', number>
type BoolSettingItem (line 16) | type BoolSettingItem = SettingItemBase<'bool', boolean>
type StringSettingItem (line 17) | type StringSettingItem = SettingItemBase<'str', string>
type ListSettingItem (line 18) | type ListSettingItem = SettingItemBase<'list', any[]>
type DictSettingItem (line 19) | type DictSettingItem = SettingItemBase<'dict', object>
type SettingItem (line 21) | type SettingItem =
type PublicWebsiteSettings (line 29) | interface PublicWebsiteSettings {
type AllSettings (line 89) | type AllSettings = Record<string, SettingItem>
function getAllSiteSettings (line 91) | async function getAllSiteSettings (): Promise<AllSettings> {
function updateSiteSetting (line 99) | async function updateSiteSetting (name: string, value: any) {
function getPublicSiteSettings (line 110) | async function getPublicSiteSettings (): Promise<PublicWebsiteSettings> {
FILE: frontend/app/src/api/stats.ts
type TrendBaseItem (line 6) | interface TrendBaseItem {
type ChatUserTrendItem (line 10) | interface ChatUserTrendItem extends TrendBaseItem {
type ChatOriginTrendItem (line 15) | type ChatOriginTrendItem = TrendBaseItem & Omit<{
type TrendResponse (line 19) | interface TrendResponse<T> {
function trendResponse (line 45) | function trendResponse<T> (item: ZodType<T, any, any>): ZodType<TrendRes...
function trendParams (line 53) | function trendParams (start: Date, end: Date) {
function getChatUserTrend (line 60) | async function getChatUserTrend (startDate: Date, endDate: Date) {
function getChatOriginTrend (line 69) | async function getChatOriginTrend (startDate: Date, endDate: Date) {
FILE: frontend/app/src/api/system.ts
type RequiredBootstrapStatus (line 5) | interface RequiredBootstrapStatus {
type OptionalBootstrapStatus (line 12) | interface OptionalBootstrapStatus {
type NeedMigrationStatus (line 17) | interface NeedMigrationStatus {
type BootstrapStatus (line 21) | interface BootstrapStatus {
function getBootstrapStatus (line 49) | async function getBootstrapStatus (): Promise<BootstrapStatus> {
function isBootstrapStatusPassed (line 58) | function isBootstrapStatusPassed (bootstrapStatus: BootstrapStatus): boo...
FILE: frontend/app/src/api/users.ts
type MeInfo (line 4) | interface MeInfo {
function getMe (line 20) | async function getMe (): Promise<MeInfo> {
FILE: frontend/app/src/app/(experimental)/experimental-features/route.ts
function GET (line 4) | function GET () {
FILE: frontend/app/src/app/(main)/(.)auth/login/loading.tsx
function Loading (line 5) | function Loading () {
FILE: frontend/app/src/app/(main)/(.)auth/login/page.client.tsx
function SigninDialog (line 7) | function SigninDialog ({ callbackUrl }: { callbackUrl?: string }) {
FILE: frontend/app/src/app/(main)/(.)auth/login/page.tsx
function Page (line 4) | async function Page () {
FILE: frontend/app/src/app/(main)/(admin)/chat-engines/[id]/page.tsx
function ChatEnginePage (line 7) | async function ChatEnginePage(props: { params: Promise<{ id: string }> }) {
FILE: frontend/app/src/app/(main)/(admin)/chat-engines/new/page.tsx
function NewChatEnginePage (line 5) | async function NewChatEnginePage () {
FILE: frontend/app/src/app/(main)/(admin)/chat-engines/page.tsx
function ChatEnginesPage (line 5) | function ChatEnginesPage () {
FILE: frontend/app/src/app/(main)/(admin)/embedding-models/[id]/page.tsx
function Page (line 5) | async function Page (props: { params: Promise<{ id: string }> }) {
FILE: frontend/app/src/app/(main)/(admin)/embedding-models/create/page.tsx
function Page (line 8) | function Page () {
FILE: frontend/app/src/app/(main)/(admin)/embedding-models/page.tsx
function EmbeddingModelPage (line 8) | function EmbeddingModelPage () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/items/[itemId]/page.tsx
function Page (line 9) | function Page (props: { params: Promise<{ id: string, itemId: string }> ...
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/items/new/page.tsx
function CreateEvaluationDatasetItemPage (line 11) | function CreateEvaluationDatasetItemPage (props: { params: Promise<{ id:...
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/not-found.tsx
function NotFound (line 4) | function NotFound () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/[id]/page.tsx
function EvaluationDatasetPage (line 12) | function EvaluationDatasetPage (props: { params: Promise<{ id: string }>...
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/create/page.tsx
function EvaluationTaskPage (line 9) | function EvaluationTaskPage () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/datasets/page.tsx
function EvaluationDatasetsPage (line 5) | function EvaluationDatasetsPage () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/page.tsx
function EvaluationsPage (line 3) | function EvaluationsPage () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/tasks/[id]/not-found.tsx
function NotFound (line 4) | function NotFound () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/tasks/[id]/page.tsx
function EvaluationTaskPage (line 10) | function EvaluationTaskPage (props: { params: Promise<{ id: string }> }) {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/tasks/create/page.tsx
function EvaluationTaskPage (line 9) | function EvaluationTaskPage () {
FILE: frontend/app/src/app/(main)/(admin)/evaluation/tasks/page.tsx
function EvaluationTasksPage (line 5) | function EvaluationTasksPage () {
FILE: frontend/app/src/app/(main)/(admin)/feedbacks/page.tsx
function ChatEnginesPage (line 4) | function ChatEnginesPage () {
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(special)/data-sources/new/page.tsx
function NewKnowledgeBaseDataSourcePage (line 12) | function NewKnowledgeBaseDataSourcePage (props: { params: Promise<{ id: ...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(special)/documents/[documentId]/chunks/page.tsx
function DocumentChunksPage (line 14) | function DocumentChunksPage(props: { params: Promise<{ id: string, docum...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/data-sources/page.tsx
function KnowledgeBaseDataSourcesPage (line 11) | function KnowledgeBaseDataSourcesPage(props: { params: Promise<{ id: str...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/index-progress/page.tsx
function KnowledgeBaseIndexProgressPage (line 3) | async function KnowledgeBaseIndexProgressPage(props: { params: Promise<{...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/knowledge-graph-explorer/create-synopsis-entity/page.tsx
function CreateSynopsisEntityPage (line 9) | function CreateSynopsisEntityPage(props: { params: Promise<{ id: string ...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/knowledge-graph-explorer/page.tsx
function KnowledgeGraphExplorerPage (line 3) | async function KnowledgeGraphExplorerPage(props: { params: Promise<{ id:...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/layout.tsx
function KnowledgeBaseLayout (line 14) | function KnowledgeBaseLayout(props: { params: Promise<{ id: string }>, c...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/page.tsx
function KnowledgeBasePage (line 3) | async function KnowledgeBasePage(props: { params: Promise<{ id: string }...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/settings/page.tsx
function KnowledgeBaseSettingsPage (line 4) | async function KnowledgeBaseSettingsPage(props: { params: Promise<{ id: ...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/(tabs)/tabs.tsx
function KnowledgeBaseTabs (line 6) | function KnowledgeBaseTabs ({ knowledgeBaseId }: { knowledgeBaseId: numb...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/[id]/context.tsx
function KBProvider (line 8) | function KBProvider ({ children, value }: { children: ReactNode, value: ...
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/new/page.tsx
function NewKnowledgeBasePage (line 6) | function NewKnowledgeBasePage () {
FILE: frontend/app/src/app/(main)/(admin)/knowledge-bases/page.tsx
function KnowledgeBasesPage (line 10) | function KnowledgeBasesPage () {
FILE: frontend/app/src/app/(main)/(admin)/layout.tsx
function Layout (line 5) | async function Layout ({ children }: { children: ReactNode }) {
FILE: frontend/app/src/app/(main)/(admin)/llms/[id]/page.tsx
function Page (line 6) | async function Page (props: { params: Promise<{ id: string }> }) {
FILE: frontend/app/src/app/(main)/(admin)/llms/create/page.tsx
function Page (line 8) | function Page () {
FILE: frontend/app/src/app/(main)/(admin)/llms/page.tsx
function Page (line 6) | function Page () {
FILE: frontend/app/src/app/(main)/(admin)/reranker-models/[id]/page.tsx
function Page (line 6) | async function Page (props: { params: Promise<{ id: string }> }) {
FILE: frontend/app/src/app/(main)/(admin)/reranker-models/create/page.tsx
function Page (line 8) | function Page () {
FILE: frontend/app/src/app/(main)/(admin)/reranker-models/page.tsx
function Page (line 6) | function Page () {
FILE: frontend/app/src/app/(main)/(admin)/site-settings/custom_js/page.tsx
function CustomJsSettingsPage (line 5) | async function CustomJsSettingsPage () {
FILE: frontend/app/src/app/(main)/(admin)/site-settings/integrations/page.tsx
function LangfuseSettingsPage (line 4) | async function LangfuseSettingsPage () {
FILE: frontend/app/src/app/(main)/(admin)/site-settings/layout.tsx
function SiteSettingsLayout (line 7) | function SiteSettingsLayout ({ children }: { children: ReactNode }) {
FILE: frontend/app/src/app/(main)/(admin)/site-settings/page.tsx
function SiteSettingsPage (line 4) | async function SiteSettingsPage () {
FILE: frontend/app/src/app/(main)/(admin)/stats/trending/page.tsx
function Page (line 12) | function Page () {
FILE: frontend/app/src/app/(main)/(user)/api-keys/page.tsx
function ChatEnginesPage (line 40) | function ChatEnginesPage () {
function DeleteButton (line 104) | function DeleteButton ({ apiKey }: { apiKey: ApiKey }) {
FILE: frontend/app/src/app/(main)/(user)/c/page.tsx
function ConversationsListPage (line 5) | async function ConversationsListPage () {
FILE: frontend/app/src/app/(main)/(user)/layout.tsx
function Layout (line 5) | async function Layout ({ children }: { children: ReactNode }) {
FILE: frontend/app/src/app/(main)/c/[id]/page.tsx
function ChatDetailPage (line 26) | async function ChatDetailPage(props: { params: Promise<{ id: string }> }) {
function generateMetadata (line 88) | async function generateMetadata(props: { params: Promise<{ id: string }>...
FILE: frontend/app/src/app/(main)/layout.tsx
function Layout (line 10) | function Layout ({ children }: {
FILE: frontend/app/src/app/(main)/nav.tsx
function SiteSidebar (line 26) | function SiteSidebar ({ setting }: { setting: PublicWebsiteSettings }) {
function NavContent (line 42) | function NavContent () {
function NavFooter (line 130) | function NavFooter () {
function NavWarningDetails (line 174) | function NavWarningDetails ({ children }: { children?: ReactNode }) {
function CountSpan (line 192) | function CountSpan ({ children }: { children?: ReactNode }) {
function KnowledgeBaseNavDetails (line 196) | function KnowledgeBaseNavDetails () {
function ChatEnginesNavDetails (line 206) | function ChatEnginesNavDetails () {
FILE: frontend/app/src/app/(main)/page.tsx
function Page (line 15) | function Page () {
function isHighlightedLinkText (line 77) | function isHighlightedLinkText (text: string) {
function trimHighlightedLinkText (line 81) | function trimHighlightedLinkText (text: string) {
FILE: frontend/app/src/app/RootProviders.tsx
type RootProvidersProps (line 18) | interface RootProvidersProps {
function RootProviders (line 26) | function RootProviders ({ me, settings, bootstrapStatus, experimentalFea...
FILE: frontend/app/src/app/api/[[...fallback_placeholder]]/route.ts
type RequestInit (line 5) | interface RequestInit {
function handler (line 10) | function handler (request: NextRequest) {
function originalUrl (line 42) | function originalUrl (request: NextRequest) {
FILE: frontend/app/src/app/auth/login/page.tsx
function Page (line 4) | async function Page() {
FILE: frontend/app/src/app/layout.tsx
function generateMetadata (line 19) | async function generateMetadata (): Promise<Metadata> {
function RootLayout (line 28) | async function RootLayout ({
FILE: frontend/app/src/components/admin-page-heading.tsx
type BreadcrumbItem (line 10) | interface BreadcrumbItem {
type TableHeadingProps (line 20) | interface TableHeadingProps {
function AdminPageHeading (line 24) | function AdminPageHeading ({ breadcrumbs }: TableHeadingProps) {
FILE: frontend/app/src/components/admin-page-layout.tsx
function AdminPageLayout (line 3) | function AdminPageLayout ({ children }: { children: ReactNode }) {
FILE: frontend/app/src/components/api-keys/CreateApiKeyForm.tsx
type CreateApiKeyFormProps (line 10) | interface CreateApiKeyFormProps {
function CreateApiKeyForm (line 19) | function CreateApiKeyForm ({ onCreated }: CreateApiKeyFormProps) {
FILE: frontend/app/src/components/auth/AuthProvider.tsx
type AuthContextValues (line 4) | interface AuthContextValues {
function AuthProvider (line 13) | function AuthProvider ({ children, ...context }: AuthContextValues & { c...
function useAuth (line 21) | function useAuth () {
FILE: frontend/app/src/components/auto-scroll/auto-scroll.stories.tsx
type Story (line 20) | type Story = StoryObj<typeof meta>
function AutoScrollByObservingChildren (line 82) | function AutoScrollByObservingChildren ({ target }: { target: RefObject<...
FILE: frontend/app/src/components/auto-scroll/auto-scroll.tsx
type AutoScrollProps (line 6) | interface AutoScrollProps {
function AutoScroll (line 16) | function AutoScroll ({ target, edgePixels = 0, children }: AutoScrollPro...
FILE: frontend/app/src/components/auto-scroll/context.ts
type ScrollInfo (line 5) | type ScrollInfo = {
type ScrollEdge (line 13) | type ScrollEdge = 'top' | 'left' | 'right' | 'bottom';
type ScrollHandler (line 14) | type ScrollHandler = (info: ScrollInfo) => void;
type AutoScrollContextValues (line 16) | interface AutoScrollContextValues {
method registerVoter (line 31) | registerVoter (id: string, defaultScroll: boolean) {}
method unregisterVoter (line 32) | unregisterVoter (id: string) {}
method voteAutoScroll (line 33) | voteAutoScroll (id: string) {}
method cancelVoteAutoScroll (line 34) | cancelVoteAutoScroll (id: string) {}
method useScroll (line 35) | useScroll () {}
method requestScroll (line 36) | requestScroll () {}
FILE: frontend/app/src/components/auto-scroll/manual-scroll-voter.tsx
function ManualScrollVoter (line 10) | function ManualScrollVoter () {
FILE: frontend/app/src/components/auto-scroll/use-auto-scroll-voter.ts
function useAutoScrollVoter (line 6) | function useAutoScrollVoter (defaultScroll: boolean) {
FILE: frontend/app/src/components/auto-scroll/use-request-scroll.ts
function useRequestScroll (line 6) | function useRequestScroll () {
FILE: frontend/app/src/components/branding.tsx
function Branding (line 4) | function Branding ({ setting }: { setting: PublicWebsiteSettings }) {
FILE: frontend/app/src/components/cells/actions.tsx
type CellAction (line 12) | interface CellAction {
type ActionUIContext (line 22) | interface ActionUIContext {
function actions (line 30) | function actions<Row> (items: (row: Row) => CellAction[]) {
function Action (line 57) | function Action ({ item, open, setOpen }: { item: CellAction, open: bool...
FILE: frontend/app/src/components/cells/boolean.tsx
function boolean (line 4) | function boolean (props: CellContext<any, boolean | undefined | null>) {
FILE: frontend/app/src/components/cells/datetime.tsx
function datetime (line 4) | function datetime (props: CellContext<any, Date | null | undefined>) {
FILE: frontend/app/src/components/cells/error-message.tsx
function errorMessageCell (line 5) | function errorMessageCell<Row> (trimLength = 25) {
function AutoErrorMessagePopper (line 11) | function AutoErrorMessagePopper ({ trimLength = 25, children }: { trimLe...
FILE: frontend/app/src/components/cells/link.tsx
type LinkCellProps (line 4) | interface LinkCellProps<Row> {
function link (line 19) | function link<Row> ({ icon, url, text, truncate, truncate_length }: Link...
FILE: frontend/app/src/components/cells/percent.tsx
type PercentCellConfig (line 7) | interface PercentCellConfig {
function getStyle (line 39) | function getStyle (value: number, stops: {
FILE: frontend/app/src/components/cells/reference.tsx
function DatasourceCell (line 5) | function DatasourceCell ({ id, name }: { id: number, name: string }) {
function KnowledgeBaseCell (line 9) | function KnowledgeBaseCell ({ id, name }: { id?: number, name?: string }) {
FILE: frontend/app/src/components/charts/IndexProgressChart.stories.tsx
type Story (line 33) | type Story = StoryObj<typeof meta>
method render (line 54) | render ({ ...args }) {
FILE: frontend/app/src/components/charts/IndexProgressChart.tsx
function IndexProgressChart (line 45) | function IndexProgressChart ({ title, description, label, data }: { titl...
type IndexProgressChartPlaceholderProps (line 121) | interface IndexProgressChartPlaceholderProps {
function IndexProgressChartPlaceholder (line 127) | function IndexProgressChartPlaceholder ({ title, label, description }: I...
FILE: frontend/app/src/components/charts/TotalCard.stories.tsx
type Story (line 32) | type Story = StoryObj<typeof meta>
FILE: frontend/app/src/components/charts/TotalCard.tsx
type TotalCardProps (line 8) | interface TotalCardProps {
function TotalCard (line 16) | function TotalCard ({ isLoading = false, title, icon, total, children }:...
FILE: frontend/app/src/components/charts/TrendsChart.tsx
function TrendsChart (line 11) | function TrendsChart<T extends { date: Date }> ({
FILE: frontend/app/src/components/chat-engine/chat-engines-table.tsx
function ChatEnginesTable (line 79) | function ChatEnginesTable () {
function NameLink (line 90) | function NameLink ({ chatEngine }: { chatEngine: ChatEngine }) {
FILE: frontend/app/src/components/chat-engine/create-chat-engine-form.tsx
function CreateChatEngineForm (line 49) | function CreateChatEngineForm ({ defaultChatEngineOptions }: { defaultCh...
function SectionTabTrigger (line 224) | function SectionTabTrigger ({ value, required }: { value: string, requir...
function Section (line 252) | function Section ({ title, children }: { title: string, children: ReactN...
function SubSection (line 262) | function SubSection ({ title, children }: { title: ReactNode, children: ...
FILE: frontend/app/src/components/chat-engine/hooks.ts
function useAllChatEngines (line 5) | function useAllChatEngines (onlyPublic: boolean = false) {
FILE: frontend/app/src/components/chat-engine/kb-list-select.tsx
function KBListSelect (line 12) | function KBListSelect ({ ref, disabled, value, onChange, ...props }: For...
function KBListSelectForObjectValue (line 109) | function KBListSelectForObjectValue ({ value, onChange, ...props }: Form...
FILE: frontend/app/src/components/chat-engine/update-chat-engine-form.tsx
function UpdateChatEngineForm (line 20) | function UpdateChatEngineForm ({ chatEngine, defaultChatEngineOptions }:...
function optionAccessor (line 232) | function optionAccessor<K extends keyof ChatEngineOptions> (key: K): Gen...
function kgOptionAccessor (line 250) | function kgOptionAccessor<K extends keyof ChatEngineKnowledgeGraphOption...
function llmOptionAccessor (line 271) | function llmOptionAccessor<K extends keyof ChatEngineLLMOptions> (key: K...
method get (line 295) | get (data) {
method set (line 298) | set () {
method get (line 330) | get (data) {
method set (line 334) | set (data, value) {
method get (line 388) | get (engine) {
method set (line 391) | set (engine, value) {
function Section (line 405) | function Section ({ title, children }: { title: string, children: ReactN...
function SubSection (line 415) | function SubSection ({ title, children }: { title: ReactNode, children: ...
FILE: frontend/app/src/components/chat/ask.tsx
function Ask (line 6) | function Ask ({ className, loading, disabled, ask, engine, setEngine }: ...
FILE: frontend/app/src/components/chat/chat-controller.ts
type ChatControllerEventsMap (line 10) | interface ChatControllerEventsMap<State = AppChatStreamState, Annotation...
class ChatController (line 34) | class ChatController<State extends AppChatStreamState = AppChatStreamSta...
method postState (line 45) | get postState () {
method constructor (line 53) | constructor (
method inputElement (line 76) | get inputElement () {
method inputElement (line 80) | set inputElement (value: HTMLInputElement | HTMLTextAreaElement | null) {
method _enabledInputElement (line 104) | private get _enabledInputElement () {
method inputEnabled (line 117) | get inputEnabled () {
method input (line 125) | get input (): string {
method input (line 129) | set input (value: string) {
method focusInput (line 136) | focusInput () {
method messages (line 143) | get messages (): (ChatMessageController | StackVMChatMessageController...
method post (line 147) | async post (params: Omit<PostChatParams, 'chat_id'>) {
method regenerate (line 201) | async regenerate (messageId: number) {
method updateChat (line 205) | updateChat (chat: Chat): void {
method upsertMessage (line 219) | upsertMessage (message: ChatMessage): void {
method _processPart (line 229) | _processPart (ongoingMessageController: ChatMessageController | StackV...
method _processDataPart (line 263) | private _processDataPart (ongoingMessageController: ChatMessageControl...
method _processMessageAnnotationPart (line 278) | private _processMessageAnnotationPart (ongoingMessageController: ChatM...
method _processTextPart (line 284) | private _processTextPart (ongoingMessageController: ChatMessageControl...
method _processErrorPart (line 291) | private _processErrorPart (ongoingMessageController: ChatMessageContro...
method _processToolCallPart (line 296) | private _processToolCallPart (ongoingMessageController: ChatMessageCon...
method _processToolResultPart (line 301) | private _processToolResultPart (ongoingMessageController: ChatMessageC...
method createMessage (line 306) | private createMessage (message: ChatMessage, initialOngoingState?: tru...
method createLegacyMessage (line 318) | private createLegacyMessage (message: ChatMessage, initialOngoingState...
method createStackVMMessage (line 325) | private createStackVMMessage (message: ChatMessage, initialOngoingStat...
function assertNonNull (line 333) | function assertNonNull<T> (value: T, message: string, ...args: any): ass...
FILE: frontend/app/src/components/chat/chat-hooks.tsx
type ChatsProviderValues (line 11) | interface ChatsProviderValues {
method newChat (line 23) | newChat (): ChatController {
method destroyChat (line 26) | destroyChat () {
function ChatsProvider (line 33) | function ChatsProvider ({ onChatCreated, children }: { children: ReactNo...
function ChatControllerProvider (line 71) | function ChatControllerProvider ({ controller, children }: { controller:...
function useChats (line 80) | function useChats () {
type ChatMessageGroup (line 84) | interface ChatMessageGroup {
function useChatController (line 92) | function useChatController (
function useChatInfo (line 122) | function useChatInfo (controller: ChatController) {
function useChatPostState (line 143) | function useChatPostState (controller: ChatController | undefined) {
function useChatMessageControllers (line 173) | function useChatMessageControllers (controller: ChatController) {
function useChatMessageGroups (line 195) | function useChatMessageGroups (controllers: ChatMessageController[]) {
function collectMessageGroups (line 205) | function collectMessageGroups (messageControllers: ChatMessageController...
function useCurrentChatController (line 246) | function useCurrentChatController () {
function useChatMessageField (line 258) | function useChatMessageField (controller: ChatMessageController | undefi...
function useChatMessageStreamState (line 289) | function useChatMessageStreamState<C extends ChatMessageController> (con...
function useChatMessageStreamHistoryStates (line 317) | function useChatMessageStreamHistoryStates<C extends BaseChatMessageCont...
function useChatMessageStreamContainsState (line 347) | function useChatMessageStreamContainsState (controller: ChatMessageContr...
FILE: frontend/app/src/components/chat/chat-message-controller.ts
type OngoingState (line 6) | interface OngoingState<State = AppChatStreamState> {
type OngoingStateHistoryItem (line 13) | interface OngoingStateHistoryItem<State = AppChatStreamState> {
type ChatMessageControllerEventsMap (line 18) | interface ChatMessageControllerEventsMap<State = AppChatStreamState> {
method constructor (line 39) | constructor (message: ChatMessage, ongoing: OngoingState<State> | true |...
method content (line 53) | get content () {
method update (line 57) | update (message: ChatMessage) {
method applyStreamAnnotation (line 62) | applyStreamAnnotation (annotation: Annotation) {
method applyDelta (line 103) | applyDelta (delta: string) {
method applyError (line 115) | applyError (error: string) {
method applyToolCall (line 132) | applyToolCall ({ toolCallId, toolName, args }: { toolCallId: string, too...
method applyToolResult (line 136) | applyToolResult ({ toolCallId, result }: { toolCallId: string, result: a...
method finish (line 140) | finish () {
method message (line 146) | get message (): ChatMessage {
method ongoing (line 150) | get ongoing () {
method ongoingHistory (line 154) | get ongoingHistory () {
type ChatMessageController (line 167) | type ChatMessageController = LegacyChatMessageController | StackVMChatMe...
type ChatMessageControllerAnnotationState (line 168) | type ChatMessageControllerAnnotationState<C extends ChatMessageControlle...
class LegacyChatMessageController (line 170) | class LegacyChatMessageController extends BaseChatMessageController<AppC...
method parseAnnotation (line 173) | parseAnnotation (raw: unknown): ChatMessageAnnotation {
method createInitialOngoingState (line 177) | createInitialOngoingState (): OngoingState {
method createUnknownOngoingState (line 185) | createUnknownOngoingState (): OngoingState {
method _polishMessage (line 193) | _polishMessage (message: ChatMessage, ongoing: OngoingState, annotatio...
class StackVMChatMessageController (line 212) | class StackVMChatMessageController extends BaseChatMessageController<Sta...
method applyToolCall (line 215) | applyToolCall (payload: { toolCallId: string; toolName: string; args: ...
method applyToolResult (line 229) | applyToolResult (payload: { toolCallId: string; result: any }) {
method parseAnnotation (line 245) | parseAnnotation (raw: unknown): StackVMStateAnnotation {
method createInitialOngoingState (line 255) | createInitialOngoingState (): OngoingState<StackVMState> {
method createUnknownOngoingState (line 282) | createUnknownOngoingState (): OngoingState<StackVMState> {
method _polishMessage (line 309) | _polishMessage (message: ChatMessage): ChatMessage {
FILE: frontend/app/src/components/chat/chat-new-dialog.tsx
function ChatNewDialog (line 10) | function ChatNewDialog () {
FILE: frontend/app/src/components/chat/chat-stream-state.ts
type BaseState (line 14) | const enum BaseState {
type AppChatStreamState (line 19) | const enum AppChatStreamState {
type StackVMState (line 33) | type StackVMState = {
type StackVMToolCall (line 41) | type StackVMToolCall = { toolCallId: string, toolName: string, args: any...
type BaseAnnotation (line 43) | interface BaseAnnotation<S = AppChatStreamState> {
type TraceAnnotation (line 48) | interface TraceAnnotation extends BaseAnnotation<AppChatStreamState.TRAC...
type SourceNodesAnnotation (line 52) | interface SourceNodesAnnotation extends BaseAnnotation<AppChatStreamStat...
type RefineQuestionAnnotation (line 56) | interface RefineQuestionAnnotation extends BaseAnnotation<AppChatStreamS...
type ChatMessageAnnotation (line 60) | type ChatMessageAnnotation =
type StackVMStateAnnotation (line 66) | interface StackVMStateAnnotation extends BaseAnnotation<StackVMState> {
type ChatInitialData (line 69) | type ChatInitialData = {
function fixChatInitialData (line 76) | function fixChatInitialData (data: any) {
FILE: frontend/app/src/components/chat/chats-history.tsx
function ChatsHistory (line 13) | function ChatsHistory () {
FILE: frontend/app/src/components/chat/chats-table.tsx
function ChatsTable (line 12) | function ChatsTable () {
FILE: frontend/app/src/components/chat/conversation-message-groups.tsx
function ConversationMessageGroups (line 22) | function ConversationMessageGroups ({ groups }: { groups: ChatMessageGro...
function ConversationMessageGroup (line 72) | function ConversationMessageGroup ({ group, isLastGroup }: { group: Chat...
FILE: frontend/app/src/components/chat/conversation.tsx
type ConversationProps (line 13) | interface ConversationProps {
function Conversation (line 28) | function Conversation ({ open, chat, chatId, history, placeholder, preve...
FILE: frontend/app/src/components/chat/debug-info.tsx
type DebugInfoProps (line 10) | interface DebugInfoProps {
function DebugInfo (line 14) | function DebugInfo ({ group }: DebugInfoProps) {
FILE: frontend/app/src/components/chat/knowledge-graph-debug-info.tsx
function KnowledgeGraphDebugInfo (line 13) | function KnowledgeGraphDebugInfo ({ group }: { group: ChatMessageGroup }) {
function couldFetchKnowledgeGraphDebugInfo (line 94) | function couldFetchKnowledgeGraphDebugInfo (state: OngoingState<AppChatS...
FILE: frontend/app/src/components/chat/message-annotation-history-stackvm.tsx
function StackVMMessageAnnotationHistory (line 14) | function StackVMMessageAnnotationHistory ({ message }: { message: StackV...
function StackVMCheckpoint (line 120) | function StackVMCheckpoint ({ state, pc }: { state: StackVMState, pc: bo...
function StackVMDetails (line 141) | function StackVMDetails ({ pc, state }: { state: StackVMState, pc: boole...
function ToolCallInfo (line 177) | function ToolCallInfo ({ vars, step, pc, toolCalls }: { step: StackVM.mo...
function JsonValueViewer (line 215) | function JsonValueViewer ({ value }: { value: unknown }) {
function MessageAnnotationHistoryItem (line 268) | function MessageAnnotationHistoryItem ({ history, item: { state, time },...
function MessageAnnotationHistoryError (line 281) | function MessageAnnotationHistoryError ({ history, error }: { history: O...
function MessageAnnotationCurrent (line 293) | function MessageAnnotationCurrent ({ history, current }: { history: Ongo...
FILE: frontend/app/src/components/chat/message-annotation-history.tsx
function MessageAnnotationHistory (line 9) | function MessageAnnotationHistory ({ message }: { message: LegacyChatMes...
function MessageAnnotationHistoryItem (line 76) | function MessageAnnotationHistoryItem ({ history, item: { state, time },...
function MessageAnnotationHistoryError (line 90) | function MessageAnnotationHistoryError ({ history, error }: { history: O...
function MessageAnnotationCurrent (line 102) | function MessageAnnotationCurrent ({ history, current }: { history: Ongo...
FILE: frontend/app/src/components/chat/message-answer.tsx
function MessageAnswer (line 7) | function MessageAnswer ({ message, showBetaAlert }: { message: ChatMessa...
FILE: frontend/app/src/components/chat/message-auto-scroll.tsx
function MessageAutoScroll (line 9) | function MessageAutoScroll ({ message }: { message: ChatMessageControlle...
FILE: frontend/app/src/components/chat/message-beta-alert.tsx
function MessageBetaAlert (line 4) | function MessageBetaAlert () {
FILE: frontend/app/src/components/chat/message-content-sources.tsx
function MessageContextSources (line 13) | function MessageContextSources ({ message }: { message: ChatMessageContr...
function MessageContextSource (line 62) | function MessageContextSource ({ index, animation, context }: { index: n...
function MessageContextSourceCard (line 88) | function MessageContextSourceCard ({ title, href }: { title?: string, hr...
FILE: frontend/app/src/components/chat/message-content.tsx
function MessageContent (line 5) | function MessageContent ({ message }: { message: ChatMessageController |...
FILE: frontend/app/src/components/chat/message-error.tsx
function MessageError (line 7) | function MessageError ({ message }: { message: ChatMessageController }) {
FILE: frontend/app/src/components/chat/message-feedback.tsx
function MessageFeedback (line 10) | function MessageFeedback ({ initial, onFeedback, defaultAction, children...
FILE: frontend/app/src/components/chat/message-input.tsx
type MessageInputProps (line 16) | interface MessageInputProps {
function MessageInput (line 26) | function MessageInput ({
FILE: frontend/app/src/components/chat/message-operations.tsx
function MessageOperations (line 14) | function MessageOperations ({ message }: { message: ChatMessageControlle...
FILE: frontend/app/src/components/chat/message-recommend-questions.tsx
function MessageRecommendQuestions (line 13) | function MessageRecommendQuestions ({ assistant }: { assistant: ChatMess...
FILE: frontend/app/src/components/chat/message-section.tsx
function MessageSection (line 8) | function MessageSection ({ className, message, children }: { className?:...
FILE: frontend/app/src/components/chat/testutils.ts
function createExampleInitialChatMessage (line 3) | function createExampleInitialChatMessage (): ChatMessage {
FILE: frontend/app/src/components/chat/use-ask.ts
function useAsk (line 8) | function useAsk (onFinish?: () => void) {
type UseAskReturns (line 57) | type UseAskReturns = ReturnType<typeof useAsk>;
FILE: frontend/app/src/components/chat/use-message-feedback.ts
type UseMessageFeedbackReturns (line 6) | interface UseMessageFeedbackReturns {
function useMessageFeedback (line 13) | function useMessageFeedback (messageId: number | undefined, enabled: boo...
FILE: frontend/app/src/components/chat/utils.ts
function parseSource (line 13) | function parseSource (uri?: string) {
function parseHref (line 24) | function parseHref (source: ChatMessageSource): { href: string, download...
function isNotFinished (line 34) | function isNotFinished (ongoing: OngoingState<any> | undefined) {
FILE: frontend/app/src/components/config-viewer.tsx
function ConfigViewer (line 10) | function ConfigViewer ({ value: propValue }: { value: any }) {
FILE: frontend/app/src/components/copy-button.tsx
type CopyButtonProps (line 9) | interface CopyButtonProps extends Omit<ButtonProps, 'children' | 'type'> {
function CopyButton (line 14) | function CopyButton ({ text, className, onClick, autoCopy, ...props }: C...
FILE: frontend/app/src/components/dangerous-action-button.tsx
type DangerousActionButtonProps (line 9) | interface DangerousActionButtonProps extends ButtonProps {
FILE: frontend/app/src/components/data-table-heading.tsx
function DataTableHeading (line 3) | function DataTableHeading ({ children }: { children: ReactNode }) {
FILE: frontend/app/src/components/data-table-remote.tsx
type ColumnMeta (line 20) | interface ColumnMeta<TData extends RowData, TValue> {
type PageApiOptions (line 25) | interface PageApiOptions {
type DataTableRemoteProps (line 29) | interface DataTableRemoteProps<TData, TValue> {
function DataTableRemote (line 50) | function DataTableRemote<TData, TValue> ({
function getSortingSearchString (line 235) | function getSortingSearchString (sorting: SortingState) {
function TablePagination (line 241) | function TablePagination ({ className, limit = 4, loading, table }: { cl...
function steps (line 318) | function steps (from: number, to: number) {
function getColSpan (line 330) | function getColSpan<TData extends RowData, TValue> (columnDef: ColumnDef...
FILE: frontend/app/src/components/data-table.tsx
type DataTableClassNames (line 10) | interface DataTableClassNames {
type DataTableProps (line 18) | interface DataTableProps<TData, TValue> {
function DataTable (line 29) | function DataTable<TData, TValue> ({
FILE: frontend/app/src/components/datasource/create-datasource-form.tsx
function CreateDatasourceForm (line 26) | function CreateDatasourceForm ({ knowledgeBaseId, transitioning, onCreat...
function DataSourceTypeField (line 65) | function DataSourceTypeField () {
function DataSourceTypeSpecFields (line 102) | function DataSourceTypeSpecFields () {
type CreateDatasourceFormParams (line 130) | type CreateDatasourceFormParams = z.infer<typeof createDatasourceSchema>;
function switchDatasource (line 149) | function switchDatasource (data: CreateDatasourceFormParams, type: Creat...
function preCreate (line 176) | async function preCreate (ds: CreateDatasourceFormParams): Promise<BaseC...
FILE: frontend/app/src/components/datasource/datasource-card.tsx
function DatasourceCard (line 16) | function DatasourceCard ({ knowledgeBaseId, datasource }: { knowledgeBas...
function DatasourceCardDetails (line 67) | function DatasourceCardDetails ({ datasource }: { datasource: Datasource...
FILE: frontend/app/src/components/datasource/datasource-create-option.tsx
function DatasourceCreateOption (line 6) | function DatasourceCreateOption ({
FILE: frontend/app/src/components/datasource/no-datasource-placeholder.tsx
function NoDatasourcePlaceholder (line 1) | function NoDatasourcePlaceholder () {
FILE: frontend/app/src/components/datasource/update-datasource-form.tsx
function UpdateDatasourceForm (line 17) | function UpdateDatasourceForm ({ knowledgeBaseId, datasource, onUpdated ...
type UpdateDatasourceFormParams (line 47) | interface UpdateDatasourceFormParams {
FILE: frontend/app/src/components/date-format.tsx
function DateFormat (line 5) | function DateFormat ({ className, date, format: formatStr = 'yyyy-MM-dd ...
FILE: frontend/app/src/components/date-range-picker.tsx
type DateRangePickerProps (line 18) | interface DateRangePickerProps {
function DateRangePicker (line 26) | function DateRangePicker({
FILE: frontend/app/src/components/diff-seconds.tsx
function diff (line 4) | function diff (from: Date | string | number | null | undefined, to: Date...
function DiffSeconds (line 18) | function DiffSeconds ({ className, from, to }: { className?: string, fro...
FILE: frontend/app/src/components/document-viewer.tsx
type DocumentPreviewProps (line 11) | interface DocumentPreviewProps {
function DocumentViewer (line 18) | function DocumentViewer ({ content, mime }: DocumentPreviewProps) {
function DocumentPreviewDialog (line 32) | function DocumentPreviewDialog ({ title, name, mime, content }: { title:...
function MarkdownViewer (line 55) | function MarkdownViewer ({ value: propValue }: { value: string }) {
FILE: frontend/app/src/components/documents/documents-table-filters.tsx
type DocumentsTableFiltersProps (line 16) | interface DocumentsTableFiltersProps {
function DocumentsTableFilters (line 22) | function DocumentsTableFilters ({ knowledgeBaseId, table, onFilterChange...
FILE: frontend/app/src/components/documents/documents-table.tsx
function DocumentsTable (line 137) | function DocumentsTable ({ knowledgeBaseId }: { knowledgeBaseId: number ...
FILE: frontend/app/src/components/embedding-models/CreateEmbeddingModelForm.tsx
function CreateEmbeddingModelForm (line 40) | function CreateEmbeddingModelForm ({ transitioning, onCreated }: { trans...
FILE: frontend/app/src/components/embedding-models/EmbeddingModelInfo.tsx
function EmbeddingModelInfo (line 6) | function EmbeddingModelInfo ({ className, id }: { className?: string, id...
FILE: frontend/app/src/components/embedding-models/EmbeddingModelsTable.tsx
function EmbeddingModelsTable (line 15) | function EmbeddingModelsTable () {
FILE: frontend/app/src/components/embedding-models/UpdateEmbeddingModelForm.tsx
function UpdateEmbeddingModelForm (line 18) | function UpdateEmbeddingModelForm ({ embeddingModel }: { embeddingModel:...
method get (line 118) | get (data) {
method set (line 121) | set () {
method get (line 137) | get (data) {
method set (line 140) | set (data, value) {
method get (line 158) | get (data) {
method set (line 161) | set (data, value) {
FILE: frontend/app/src/components/embedding-models/hooks.tsx
function useAllEmbeddingModels (line 5) | function useAllEmbeddingModels (flag = true) {
function useEmbeddingModel (line 9) | function useEmbeddingModel (id: number | null | undefined) {
function useEmbeddingModelProviders (line 18) | function useEmbeddingModelProviders () {
FILE: frontend/app/src/components/error-card.tsx
type ErrorCardProps (line 5) | interface ErrorCardProps {
function ErrorCard (line 11) | function ErrorCard ({
FILE: frontend/app/src/components/evaluations/cells.tsx
function StatusCell (line 57) | function StatusCell ({ row }: { row: EvaluationTaskItem }) {
FILE: frontend/app/src/components/evaluations/create-evaluation-dataset-form.tsx
function CreateEvaluationDatasetForm (line 33) | function CreateEvaluationDatasetForm ({ transitioning, onCreated }: Omit...
FILE: frontend/app/src/components/evaluations/create-evaluation-dataset-item-form.tsx
function CreateEvaluationDatasetItemForm (line 36) | function CreateEvaluationDatasetItemForm ({ evaluationDatasetId, transit...
FILE: frontend/app/src/components/evaluations/create-evaluation-task-form.tsx
function CreateEvaluationTaskForm (line 19) | function CreateEvaluationTaskForm ({ transitioning, onCreated }: Omit<Co...
FILE: frontend/app/src/components/evaluations/evaluation-dataset-info.tsx
function EvaluationDatasetInfo (line 12) | function EvaluationDatasetInfo ({ evaluationDatasetId }: { evaluationDat...
function EvaluationDatasetInfoDisplay (line 24) | function EvaluationDatasetInfoDisplay ({ evaluationDataset }: { evaluati...
function EvaluationDatasetInfoSkeleton (line 91) | function EvaluationDatasetInfoSkeleton ({}: {}) {
FILE: frontend/app/src/components/evaluations/evaluation-dataset-items-table.tsx
method action (line 32) | action (context) {
method action (line 42) | async action (context) {
function EvaluationDatasetItemsTable (line 55) | function EvaluationDatasetItemsTable ({ evaluationDatasetId }: { evaluat...
FILE: frontend/app/src/components/evaluations/evaluation-datasets-table.tsx
function EvaluationDatasetsTable (line 53) | function EvaluationDatasetsTable () {
FILE: frontend/app/src/components/evaluations/evaluation-task-info.stories.tsx
method render (line 39) | render () {
method render (line 47) | render () {
FILE: frontend/app/src/components/evaluations/evaluation-task-info.tsx
function EvaluationTaskInfo (line 17) | function EvaluationTaskInfo ({ evaluationTaskId }: { evaluationTaskId: n...
function EvaluationTaskInfoSkeleton (line 27) | function EvaluationTaskInfoSkeleton () {
function EvaluationTaskInfoDisplay (line 59) | function EvaluationTaskInfoDisplay ({ task: { summary, ...task } }: { ta...
function StatusPieChart (line 117) | function StatusPieChart ({ summary }: { summary: Pick<EvaluationTaskSumm...
function StatusPieChartSkeleton (line 192) | function StatusPieChartSkeleton () {
function RagasMetricsChart (line 260) | function RagasMetricsChart ({ summary }: { summary: Pick<EvaluationTaskS...
function RagasMetricsChartSkeleton (line 301) | function RagasMetricsChartSkeleton () {
FILE: frontend/app/src/components/evaluations/evaluation-task-items-table.tsx
function EvaluationTaskItemsTable (line 56) | function EvaluationTaskItemsTable ({ evaluationTaskId }: { evaluationTas...
FILE: frontend/app/src/components/evaluations/evaluation-tasks-table.tsx
function EvaluationTasksTable (line 50) | function EvaluationTasksTable () {
FILE: frontend/app/src/components/evaluations/hooks.ts
function useAllEvaluationDatasets (line 5) | function useAllEvaluationDatasets (flag = true) {
function useEvaluationDataset (line 9) | function useEvaluationDataset (id: number | null | undefined) {
function useEvaluationDatasetItem (line 28) | function useEvaluationDatasetItem (datasetId: number, id: number) {
function mutateEvaluationDatasets (line 37) | function mutateEvaluationDatasets () {
function mutateEvaluationDataset (line 46) | function mutateEvaluationDataset (id: number) {
function useAllEvaluationTasks (line 55) | function useAllEvaluationTasks (flag = true) {
function useEvaluationTask (line 59) | function useEvaluationTask (id: number | null | undefined) {
function mutateEvaluationTasks (line 77) | function mutateEvaluationTasks () {
FILE: frontend/app/src/components/evaluations/keyword-filter-toolbar.tsx
function KeywordFilterToolbar (line 9) | function KeywordFilterToolbar ({ onFilterChange }: { onFilterChange: (fi...
type KeywordFilter (line 58) | type KeywordFilter = z.infer<typeof keywordFilter>;
FILE: frontend/app/src/components/evaluations/update-evaluation-dataset-item-form.tsx
function UpdateEvaluationDatasetItemForm (line 12) | function UpdateEvaluationDatasetItemForm ({ evaluationDatasetId, evaluat...
FILE: frontend/app/src/components/feedbacks/feedbacks-table.tsx
function FeedbacksTable (line 47) | function FeedbacksTable () {
FILE: frontend/app/src/components/form-sections.tsx
type FieldsMap (line 4) | type FieldsMap = Map<string, Map<string, FieldApi<any, any>>>;
type FormSectionsContextValues (line 5) | type FormSectionsContextValues = readonly [FieldsMap, Dispatch<SetStateA...
constant EMPTY_SET (line 8) | const EMPTY_SET = new Map<string, FieldApi<any, any>>();
function FormSectionsProvider (line 10) | function FormSectionsProvider ({ children }: { children?: ReactNode }) {
function useFormSectionFields (line 19) | function useFormSectionFields (section: string): ReadonlyMap<string, Fie...
type FormSectionContextValues (line 24) | interface FormSectionContextValues {
method register (line 29) | register (field: FieldApi<any, any>): () => void {
function FormSection (line 34) | function FormSection ({ value, children }: { value: string, children?: R...
function useRegisterFieldInFormSection (line 70) | function useRegisterFieldInFormSection (field: FieldApi<any, any, any, a...
FILE: frontend/app/src/components/form/biz.tsx
function ProviderSelect (line 209) | function ProviderSelect<Provider extends ProviderOption> ({
function EvaluationDatasetSelect (line 299) | function EvaluationDatasetSelect ({ reverse = true, ref, ...props }: Omi...
function ChatEngineSelect (line 328) | function ChatEngineSelect ({ reverse = true, ref, ...props }: Omit<FormC...
FILE: frontend/app/src/components/form/control-widget.tsx
type FormControlWidgetProps (line 16) | interface FormControlWidgetProps<T, Optional extends boolean = false> {
type FormSwitchProps (line 33) | interface FormSwitchProps extends FormControlWidgetProps<boolean>, Omit<...
type FormCheckboxProps (line 49) | interface FormCheckboxProps extends FormControlWidgetProps<boolean>, Omi...
type FormSelectConfig (line 65) | interface FormSelectConfig<T extends object, K extends KeyOfType<T, Key>> {
type FormComboboxConfig (line 76) | interface FormComboboxConfig<T extends object, K extends KeyOfType<T, Ke...
type FormComboboxProps (line 81) | interface FormComboboxProps<T extends object, K extends KeyOfType<T, Key...
function FormCombobox (line 89) | function FormCombobox<T extends object, K extends KeyOfType<T, Key>> ({ ...
function FormComboboxClearButton (line 168) | function FormComboboxClearButton ({ onClick }: { onClick?: () => void }) {
FILE: frontend/app/src/components/form/create-entity-form.tsx
type CreateEntityFormBetaProps (line 8) | interface CreateEntityFormBetaProps<R, I> {
type CreateEntityFormComponent (line 16) | interface CreateEntityFormComponent<R, I> extends FunctionComponent<Crea...
function withCreateEntityForm (line 19) | function withCreateEntityForm<T, R, I = any> (
FILE: frontend/app/src/components/form/field-layout.tsx
function formFieldLayout (line 19) | function formFieldLayout<T> (): TypedFormFieldLayouts<
type TypedFormFieldLayouts (line 38) | interface TypedFormFieldLayouts<TFormData> {
type WidgetProps (line 45) | type WidgetProps<TFormData, TName extends DeepKeys<TFormData>> = Require...
type FormFieldLayoutProps (line 47) | interface FormFieldLayoutProps<
function renderWidget (line 65) | function renderWidget<
function FormFieldBasicLayout (line 108) | function FormFieldBasicLayout<
function FormFieldInlineLayout (line 143) | function FormFieldInlineLayout<
function FormFieldContainedLayout (line 175) | function FormFieldContainedLayout<
type DeepKeysOfType (line 214) | type DeepKeysOfType<T, Value> = string & keyof { [P in DeepKeys<T> as De...
function FormPrimitiveArrayFieldBasicLayout (line 216) | function FormPrimitiveArrayFieldBasicLayout<
FILE: frontend/app/src/components/form/root-error.tsx
function FormRootError (line 6) | function FormRootError ({ title = 'Operation failed' }: { title?: string...
function getFormError (line 21) | function getFormError (state: FormState<any>, error: unknown) {
FILE: frontend/app/src/components/form/utils.ts
function onSubmitHelper (line 4) | function onSubmitHelper<T> (
function applyFormError (line 44) | function applyFormError<FormApi extends { fieldInfo: Record<string, Fiel...
FILE: frontend/app/src/components/form/widgets/CodeInput.tsx
type CodeInputProps (line 9) | interface CodeInputProps extends FormControlWidgetProps<string> {
class PlaceholderContentWidget (line 125) | class PlaceholderContentWidget implements monaco.editor.IContentWidget {
method constructor (line 130) | constructor (
method onDidChangeModelContent (line 139) | private onDidChangeModelContent (): void {
method getId (line 147) | getId (): string {
method getDomNode (line 151) | getDomNode (): HTMLElement {
method getPosition (line 164) | getPosition (): monaco.editor.IContentWidgetPosition | null {
FILE: frontend/app/src/components/form/widgets/FileInput.tsx
type FileInputProps (line 6) | interface FileInputProps extends FormControlWidgetProps<File, true> {
FILE: frontend/app/src/components/form/widgets/FilesInput.tsx
type FilesInputProps (line 11) | interface FilesInputProps extends FormControlWidgetProps<File[]> {
FILE: frontend/app/src/components/form/widgets/PromptInput.tsx
type PromptInputProps (line 7) | interface PromptInputProps extends FormControlWidgetProps<string> {
FILE: frontend/app/src/components/graph/GraphCreateEntity.tsx
function GraphCreateEntity (line 26) | function GraphCreateEntity ({ className, knowledgeBaseId, onCreated }: {...
function CreateEntityForm (line 53) | function CreateEntityForm ({ className, entities, onSubmit, onClearSelec...
FILE: frontend/app/src/components/graph/GraphEditor.tsx
function GraphEditor (line 22) | function GraphEditor ({ knowledgeBaseId }: { knowledgeBaseId: number }) {
function SubgraphSelector (line 76) | function SubgraphSelector ({ knowledgeBaseId, query, onQueryChange }: { ...
function Editor (line 130) | function Editor ({ knowledgeBaseId, network, target, onTargetChange, onE...
function getFetchInfo (line 144) | function getFetchInfo (kbId: number, query: string | null): [string | fa...
function parseQuery (line 175) | function parseQuery (query: string | null) {
FILE: frontend/app/src/components/graph/action.ts
type ActionStatus (line 3) | type ActionStatus<T> = {
function useAction (line 25) | function useAction<T> (action: () => Promise<T>) {
FILE: frontend/app/src/components/graph/components/EditingButton.tsx
type EditingButtonProps (line 4) | interface EditingButtonProps {
function EditingButton (line 14) | function EditingButton ({ onEnterSubgraph, subGraphTitle = 'Subgraph', e...
FILE: frontend/app/src/components/graph/components/EntitiesTable.tsx
type RemoteEntity (line 14) | interface RemoteEntity extends Partial<Omit<Entity, 'id'>> {
function EntitiesTable (line 20) | function EntitiesTable ({ className, isLoading, table }: { className?: s...
FILE: frontend/app/src/components/graph/components/InputField.tsx
type InputFieldProps (line 3) | interface InputFieldProps extends Omit<InputHTMLAttributes<HTMLInputElem...
FILE: frontend/app/src/components/graph/components/JsonField.tsx
type JsonFieldProps (line 8) | interface JsonFieldProps {
type JsonFieldInstance (line 14) | interface JsonFieldInstance {
method value (line 35) | get value () {
method value (line 42) | set value (value: any) {
FILE: frontend/app/src/components/graph/components/LinkDetails.tsx
function LinkDetails (line 18) | function LinkDetails ({
FILE: frontend/app/src/components/graph/components/NetworkCanvas.tsx
type NetworkCanvasProps (line 7) | interface NetworkCanvasProps<Node extends NetworkNode, Link extends Netw...
function NetworkCanvas (line 13) | function NetworkCanvas<Node extends NetworkNode, Link extends NetworkLin...
FILE: frontend/app/src/components/graph/components/NetworkViewer.tsx
type NetworkViewerProps (line 10) | interface NetworkViewerProps {
type NetworkViewerDetailsProps (line 18) | interface NetworkViewerDetailsProps {
function randomPosition (line 24) | function randomPosition (radius: number, kbSpacing: number, kbIndex: num...
function NetworkViewer (line 34) | function NetworkViewer ({ network, loading, loadingTitle, className, Det...
FILE: frontend/app/src/components/graph/components/NodeDetails.tsx
function NodeDetails (line 18) | function NodeDetails ({
FILE: frontend/app/src/components/graph/components/SearchEntity.tsx
type SearchEntityProps (line 14) | interface SearchEntityProps extends UseEntitiesRequired {
function SearchEntity (line 21) | function SearchEntity ({ knowledgeBaseId, ...props }: SearchEntityProps) {
function TableFilterForm (line 50) | function TableFilterForm ({ className, filter, onFilterChange, disabled ...
FILE: frontend/app/src/components/graph/components/SearchEntityById.tsx
type SearchEntityProps (line 14) | interface SearchEntityProps extends UseEntitiesRequired {
function SearchEntityById (line 21) | function SearchEntityById ({ knowledgeBaseId, ...props }: SearchEntityPr...
function TableFilterForm (line 50) | function TableFilterForm ({ className, filter, onFilterChange, disabled ...
FILE: frontend/app/src/components/graph/components/TextareaField.tsx
type TextareaFieldProps (line 3) | interface TextareaFieldProps extends Omit<TextareaHTMLAttributes<HTMLTex...
FILE: frontend/app/src/components/graph/network/CanvasNetworkRenderer.ts
type NetworkNodeView (line 9) | interface NetworkNodeView extends SimulationNodeDatum {
type NetworkLinkView (line 18) | interface NetworkLinkView extends SimulationLinkDatum<NetworkNodeView> {
class CanvasNetworkRenderer (line 28) | class CanvasNetworkRenderer<Node extends NetworkNode, Link extends Netwo...
method constructor (line 77) | constructor(
method compile (line 84) | private compile(options: NetworkRendererOptions<Node, Link>) {
method updateViewportBounds (line 117) | private updateViewportBounds() {
method isNodeInViewport (line 139) | private isNodeInViewport(node: any): boolean {
method isLinkInViewport (line 148) | private isLinkInViewport(link: any): boolean {
method mount (line 164) | mount(container: HTMLElement) {
method unmount (line 249) | unmount() {
method drawNodeWithLabel (line 281) | private drawNodeWithLabel(node: any, ctx: CanvasRenderingContext2D, gl...
method drawLink (line 319) | private drawLink(link: any, ctx: CanvasRenderingContext2D) {
method drawArrow (line 344) | private drawArrow(
method onNodeClick (line 376) | private onNodeClick(node: any, event: MouseEvent) {
method onLinkClick (line 384) | private onLinkClick(link: any, event: MouseEvent) {
method onBackgroundClick (line 392) | private onBackgroundClick() {
method highlightLink (line 397) | private highlightLink(link: any) {
method highlightConnections (line 402) | private highlightConnections(node: any) {
method clearHighlight (line 442) | private clearHighlight() {
method calculateAndCacheClusters (line 463) | private calculateAndCacheClusters() {
method calculateAndCacheAdjacency (line 477) | private calculateAndCacheAdjacency() {
method findClusters (line 510) | private findClusters(): Map<IdType, number> {
method render (line 554) | render() {
method focusNode (line 615) | focusNode(id: IdType): void {}
method blurNode (line 616) | blurNode(): void {}
method focusLink (line 617) | focusLink(id: IdType): void {}
method blurLink (line 618) | blurLink(): void {}
FILE: frontend/app/src/components/graph/network/Network.ts
type IdType (line 3) | type IdType = string | number;
type NetworkNode (line 5) | interface NetworkNode {
type NetworkLink (line 9) | interface NetworkLink {
type BaseNetworkOptions (line 15) | interface BaseNetworkOptions {
type ReadonlyNetwork (line 19) | interface ReadonlyNetwork<Node extends NetworkNode, Link extends Network...
type Network (line 31) | interface Network<Node extends NetworkNode, Link extends NetworkLink> ex...
type NetworkEvents (line 41) | interface NetworkEvents {
method nodesIn (line 57) | nodesIn (ids: Iterable<IdType>) {
method nodesNotIn (line 61) | nodesNotIn (ids: Iterable<IdType>) {
method nodesWithAttrMatch (line 65) | nodesWithAttrMatch<K extends keyof Node> (field: K, test: (value: Node[K...
method merge (line 69) | merge<NewNode extends NetworkNode, NewLink extends NetworkLink> (nodeIde...
class BaseNetwork (line 113) | class BaseNetwork<Node extends NetworkNode, Link extends NetworkLink> ex...
method constructor (line 125) | constructor ({
method node (line 135) | node (nodeId: IdType) {
method addNode (line 139) | addNode (node: Node): void {
method removeNode (line 144) | removeNode (nodeId: IdType) {
method link (line 172) | link (nodeId: IdType) {
method addLink (line 176) | addLink (link: Link): void {
method removeLink (line 197) | removeLink (linkId: IdType): void {
method nodes (line 210) | nodes () {
method links (line 214) | links () {
method nodeNeighborhoods (line 218) | nodeNeighborhoods (id: IdType): Set<IdType> | null {
method replaceNodeAttrs (line 228) | replaceNodeAttrs (id: IdType, partial: Omit<Node, 'id'>) {
method replaceLinkAttrs (line 241) | replaceLinkAttrs (id: IdType, partial: Omit<Link, 'id' | 'source' | 't...
method constructor (line 259) | protected constructor (private _source: ReadonlyNetwork<Node, Link>) {
method nodes (line 265) | nodes () {
method links (line 285) | links () {
method node (line 300) | node (idType: IdType) {
method link (line 304) | link (idType: IdType) {
method nodeNeighborhoods (line 308) | nodeNeighborhoods (id: IdType): Set<IdType> | null {
class NodesWhitelistView (line 313) | class NodesWhitelistView<Node extends NetworkNode, Link extends NetworkL...
method constructor (line 314) | constructor (view: ReadonlyNetwork<Node, Link>, private readonly _node...
method includesNode (line 318) | includesNode (node: Node): boolean {
class NodesBlacklistView (line 323) | class NodesBlacklistView<Node extends NetworkNode, Link extends NetworkL...
method constructor (line 324) | constructor (view: ReadonlyNetwork<Node, Link>, private readonly _node...
method includesNode (line 328) | includesNode (node: Node): boolean {
class NodeAttributeFilterView (line 333) | class NodeAttributeFilterView<Node extends NetworkNode, Link extends Net...
method constructor (line 334) | constructor (view: ReadonlyNetwork<Node, Link>, private _attr: K, priv...
method includesNode (line 336) | includesNode (node: Node): boolean {
FILE: frontend/app/src/components/graph/network/NetworkRendererOptions.ts
type NetworkRendererOptions (line 3) | interface NetworkRendererOptions<Node, Link> {
FILE: frontend/app/src/components/graph/remote.ts
function useRemote (line 3) | function useRemote<Params extends any[], Data> (initialData: Data, fn: (...
FILE: frontend/app/src/components/graph/selectEntities.ts
type SearchEntityFilter (line 9) | type SearchEntityFilter = {
function shouldFetch (line 14) | function shouldFetch (filter: SearchEntityFilter) {
type UseEntitiesRequired (line 18) | type UseEntitiesRequired = Pick<ReturnType<typeof useEntities>, 'selecte...
function useEntities (line 20) | function useEntities () {
function useGraphEntitiesTable (line 56) | function useGraphEntitiesTable (kbId: number, { rowSelection, setRowSele...
function useGraphEntitiesByIdsTable (line 97) | function useGraphEntitiesByIdsTable (kbId: number, { rowSelection, setRo...
FILE: frontend/app/src/components/graph/useDirtyEntity.ts
function useDirtyEntity (line 7) | function useDirtyEntity (kbId: number, id: any) {
FILE: frontend/app/src/components/graph/useDirtyRelationship.ts
function useDirtyRelationship (line 7) | function useDirtyRelationship (kbId: number, id: any) {
FILE: frontend/app/src/comp
Condensed preview — 891 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,394K chars).
[
{
"path": ".dockerignore",
"chars": 130,
"preview": "# Frontend\n.github\ne2e\nfrontend/Dockerfile\nfrontend/**/node_modules\nfrontend/app/.next\nfrontend/app/.swc\nfrontend/packag"
},
{
"path": ".github/actions/decide/.gitignore",
"chars": 12,
"preview": "node_modules"
},
{
"path": ".github/actions/decide/action.yml",
"chars": 646,
"preview": "name: 'Decide action'\ndescription: \"See issue https://github.com/pingcap/tidb.ai/issues/314\"\ninputs:\n pr-e2e-frontend-l"
},
{
"path": ".github/actions/decide/index.js",
"chars": 1670,
"preview": "import * as core from '@actions/core';\nimport { context, } from '@actions/github';\n\nconst FRONTEND_PREFIX = core.getInpu"
},
{
"path": ".github/actions/decide/package.json",
"chars": 199,
"preview": "{\n \"name\": \"@tidbai/ci-decide-action\",\n \"version\": \"1.0.0\",\n \"type\": \"module\",\n \"module\": \"index.js\",\n \"devDependen"
},
{
"path": ".github/workflows/backend-test.yml",
"chars": 546,
"preview": "name: Backend Test\n\non:\n push:\n branches:\n - main\n paths:\n - backend/**\n pull_request:\n branches:\n "
},
{
"path": ".github/workflows/deploy.yml",
"chars": 481,
"preview": "name: Deploy to Production\non:\n workflow_dispatch:\n\njobs:\n build:\n name: Build\n runs-on: ubuntu-latest\n ste"
},
{
"path": ".github/workflows/regression.yml",
"chars": 1626,
"preview": "name: Regression Test\non:\n workflow_dispatch:\n inputs:\n dataset:\n description: 'Langfuse dataset to test"
},
{
"path": ".github/workflows/release.yml",
"chars": 9368,
"preview": "name: Build and Publish\n\non:\n pull_request:\n branches:\n - main\n types:\n - opened\n - synchronize\n "
},
{
"path": ".github/workflows/verify.yml",
"chars": 1080,
"preview": "name: Verify\n\non:\n push:\n branches:\n - main\n paths:\n - frontend/**\n - README.md\n pull_request:\n "
},
{
"path": ".gitignore",
"chars": 141,
"preview": "*_dev.ipynb\n.idea\n.vscode\n.env\n.ruff_cache\n\nredis-data\ndata\nvenv\n.venv\nlocal-embedding-reranker\n\n*.swp\n*.swo\n\n.next\nnode"
},
{
"path": "CONTRIBUTING.md",
"chars": 4365,
"preview": "# How to contribute\n\n## Contributing Guidelines\n\n[pingcap/autoflow](https://github.com/pingcap/autoflow) is an open-sour"
},
{
"path": "LICENSE.txt",
"chars": 11336,
"preview": " Apache License\n Version 2.0, January 2004\n "
},
{
"path": "README.md",
"chars": 4537,
"preview": "<!-- markdownlint-disable MD033 MD041 -->\n\n<div align=\"center\">\n<h1>AutoFlow</h1>\n <a href='https://www.pingcap.com/tid"
},
{
"path": "backend/.dockerignore",
"chars": 251,
"preview": "# MacOS\n.DS_Store\n\n# Environment\n.env\n.venv\n\n# Git\n.git\n\n# IDE\n.idea\n.vscode\n\n# Cache\n.pytest_cache\n.ruff_cache\n**/__pyc"
},
{
"path": "backend/.gitignore",
"chars": 3144,
"preview": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packagi"
},
{
"path": "backend/.pre-commit-config.yaml",
"chars": 184,
"preview": "repos:\n- repo: https://github.com/astral-sh/ruff-pre-commit\n # Ruff version.\n rev: v0.8.6\n hooks:\n # Run the linte"
},
{
"path": "backend/.python-version",
"chars": 6,
"preview": "3.12.3"
},
{
"path": "backend/Dockerfile",
"chars": 899,
"preview": "FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim\n\nWORKDIR /app/\n\n# Setup supervisord.\nRUN apt-get update && apt-get in"
},
{
"path": "backend/Makefile",
"chars": 1091,
"preview": ".PHONY: test\n\nmakemigrations:\n\t@echo \"Creating migrations...\"\n\t@if [ -z \"$(NAME)\" ]; then \\\n\t\tuv run alembic revision --"
},
{
"path": "backend/README.md",
"chars": 412,
"preview": "# Backend of tidb.ai\n\n\n## Development\n\n### Install dependencies\n\n1. Install [uv](https://docs.astral.sh/uv/getting-start"
},
{
"path": "backend/alembic.ini",
"chars": 1722,
"preview": "# A generic, single database configuration.\n\n[alembic]\n# path to migration scripts\nscript_location = app/alembic\n\n# temp"
},
{
"path": "backend/app/__init__.py",
"chars": 63,
"preview": "import os\n\nos.environ[\"LITELLM_LOCAL_MODEL_COST_MAP\"] = \"True\"\n"
},
{
"path": "backend/app/alembic/env.py",
"chars": 2795,
"preview": "from logging.config import fileConfig\n\nfrom alembic import context\nfrom sqlalchemy import engine_from_config, pool\nfrom "
},
{
"path": "backend/app/alembic/script.py.mako",
"chars": 569,
"preview": "\"\"\"${message}\n\nRevision ID: ${up_revision}\nRevises: ${down_revision | comma,n}\nCreate Date: ${create_date}\n\n\"\"\"\nfrom ale"
},
{
"path": "backend/app/alembic/versions/00534dc350db_.py",
"chars": 2077,
"preview": "\"\"\"empty message\n\nRevision ID: 00534dc350db\nRevises: 10f36e8a25c4\nCreate Date: 2024-08-26 12:46:00.203425\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/041fbef26e3a_.py",
"chars": 1320,
"preview": "\"\"\"empty message\n\nRevision ID: 041fbef26e3a\nRevises: 04d81be446c3\nCreate Date: 2024-08-19 08:20:13.695891\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/04947f9684ab_public_chat_engine.py",
"chars": 676,
"preview": "\"\"\"public_chat_engine\n\nRevision ID: 04947f9684ab\nRevises: 211f3c5aa125\nCreate Date: 2025-05-28 15:13:22.058160\n\n\"\"\"\n\nfro"
},
{
"path": "backend/app/alembic/versions/04d4f05116ed_.py",
"chars": 3244,
"preview": "\"\"\"empty message\n\nRevision ID: 04d4f05116ed\nRevises: 94b198e20946\nCreate Date: 2024-07-23 01:26:07.117623\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/04d81be446c3_.py",
"chars": 934,
"preview": "\"\"\"empty message\n\nRevision ID: 04d81be446c3\nRevises: e32f1e546eec\nCreate Date: 2024-08-08 17:11:50.178696\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/10f36e8a25c4_.py",
"chars": 861,
"preview": "\"\"\"empty message\n\nRevision ID: 10f36e8a25c4\nRevises: 041fbef26e3a\nCreate Date: 2024-08-20 09:10:50.130219\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/197bc8be72d1_.py",
"chars": 830,
"preview": "\"\"\"empty message\n\nRevision ID: 197bc8be72d1\nRevises: 04d4f05116ed\nCreate Date: 2024-07-25 14:49:29.363595\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/211f3c5aa125_chunking_settings.py",
"chars": 704,
"preview": "\"\"\"chunking_settings\n\nRevision ID: 211f3c5aa125\nRevises: 2adc0b597dcd\nCreate Date: 2025-02-17 14:20:56.253857\n\n\"\"\"\n\nfrom"
},
{
"path": "backend/app/alembic/versions/27a6723b767a_.py",
"chars": 950,
"preview": "\"\"\"empty message\n\nRevision ID: 27a6723b767a\nRevises: d2ad44deab20\nCreate Date: 2024-11-29 20:38:05.773083\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/2adc0b597dcd_int_enum_type.py",
"chars": 997,
"preview": "\"\"\"int_enum_type\n\nRevision ID: 2adc0b597dcd\nRevises: a54f966436ce\nCreate Date: 2025-01-24 17:58:08.339090\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/2fc10c21bf88_.py",
"chars": 15844,
"preview": "\"\"\"empty message\n\nRevision ID: 5fdea8e26454\nRevises:\nCreate Date: 2024-07-10 14:43:55.913126\n\n\"\"\"\n\nfrom alembic import o"
},
{
"path": "backend/app/alembic/versions/749767db5505_add_recommend_questions.py",
"chars": 1591,
"preview": "\"\"\"add recommend questions\n\nRevision ID: 749767db5505\nRevises: 8093333c0d87\nCreate Date: 2024-10-15 16:02:14.203584\n\n\"\"\""
},
{
"path": "backend/app/alembic/versions/8093333c0d87_.py",
"chars": 848,
"preview": "\"\"\"empty message\n\nRevision ID: 8093333c0d87\nRevises: 830fd9c44f39\nCreate Date: 2024-09-24 12:23:48.076576\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/830fd9c44f39_.py",
"chars": 1234,
"preview": "\"\"\"empty message\n\nRevision ID: 830fd9c44f39\nRevises: dfee070b8abd\nCreate Date: 2024-09-19 13:04:30.351449\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/94b198e20946_.py",
"chars": 2891,
"preview": "\"\"\"empty message\n\nRevision ID: 94b198e20946\nRevises: 2fc10c21bf88\nCreate Date: 2024-07-11 15:19:19.174568\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/a54f966436ce_evaluation.py",
"chars": 4552,
"preview": "\"\"\"evaluation\n\nRevision ID: a54f966436ce\nRevises: 27a6723b767a\nCreate Date: 2024-12-09 16:46:21.077517\n\n\"\"\"\n\nfrom alembi"
},
{
"path": "backend/app/alembic/versions/a8c79553c9f6_.py",
"chars": 806,
"preview": "\"\"\"empty message\n\nRevision ID: a8c79553c9f6\nRevises: ac6e4d58580d\nCreate Date: 2024-08-05 13:04:17.572821\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/ac6e4d58580d_.py",
"chars": 1152,
"preview": "\"\"\"empty message\n\nRevision ID: ac6e4d58580d\nRevises: 197bc8be72d1\nCreate Date: 2024-08-01 16:15:59.164348\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/bd17a4ebccc5_.py",
"chars": 1173,
"preview": "\"\"\"empty message\n\nRevision ID: bd17a4ebccc5\nRevises: a8c79553c9f6\nCreate Date: 2024-08-08 01:20:42.069228\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/c7f016a904c1_.py",
"chars": 660,
"preview": "\"\"\"empty message\n\nRevision ID: c7f016a904c1\nRevises: 749767db5505\nCreate Date: 2024-10-30 13:28:17.345385\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/d2ad44deab20_multiple_kb.py",
"chars": 4173,
"preview": "\"\"\"multiple_kb\n\nRevision ID: d2ad44deab20\nRevises: c7f016a904c1\nCreate Date: 2024-11-15 09:51:42.493749\n\n\"\"\"\n\nfrom alemb"
},
{
"path": "backend/app/alembic/versions/dfee070b8abd_.py",
"chars": 1787,
"preview": "\"\"\"empty message\n\nRevision ID: dfee070b8abd\nRevises: eb0b85608c0a\nCreate Date: 2024-09-10 10:45:50.318277\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/e32f1e546eec_.py",
"chars": 1971,
"preview": "\"\"\"empty message\n\nRevision ID: e32f1e546eec\nRevises: bd17a4ebccc5\nCreate Date: 2024-08-08 03:55:14.042290\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/alembic/versions/eb0b85608c0a_.py",
"chars": 1317,
"preview": "\"\"\"empty message\n\nRevision ID: eb0b85608c0a\nRevises: 00534dc350db\nCreate Date: 2024-08-28 15:10:04.219389\n\n\"\"\"\n\nfrom ale"
},
{
"path": "backend/app/api/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/chat/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/chat/routes.py",
"chars": 588,
"preview": "from typing import Optional\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\n\nfrom app"
},
{
"path": "backend/app/api/admin_routes/chat_engine.py",
"chars": 2009,
"preview": "from fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.deps import SessionDep,"
},
{
"path": "backend/app/api/admin_routes/document/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/document/routes.py",
"chars": 681,
"preview": "from typing import Annotated\n\nfrom fastapi import APIRouter, Depends, Query\nfrom fastapi_pagination import Params, Page\n"
},
{
"path": "backend/app/api/admin_routes/embedding_model/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/embedding_model/models.py",
"chars": 1335,
"preview": "from datetime import datetime\nfrom typing import Any\n\nfrom pydantic import BaseModel, field_validator\nfrom typing_extens"
},
{
"path": "backend/app/api/admin_routes/embedding_model/routes.py",
"chars": 3546,
"preview": "from typing import List\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.ap"
},
{
"path": "backend/app/api/admin_routes/evaluation/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/evaluation/evaluation_dataset.py",
"chars": 7787,
"preview": "import pandas as pd\nfrom fastapi import APIRouter, status, HTTPException, Depends\nfrom fastapi_pagination import Page\nfr"
},
{
"path": "backend/app/api/admin_routes/evaluation/evaluation_task.py",
"chars": 9342,
"preview": "import logging\nfrom typing import Optional, List\n\nimport sqlmodel\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pa"
},
{
"path": "backend/app/api/admin_routes/evaluation/models.py",
"chars": 1404,
"preview": "from typing import Optional\nfrom uuid import UUID\nfrom datetime import datetime\n\nfrom fastapi_pagination import Params\nf"
},
{
"path": "backend/app/api/admin_routes/evaluation/tools.py",
"chars": 1026,
"preview": "from typing import TypeVar, Type\nfrom fastapi import status, HTTPException\nfrom sqlmodel import SQLModel, Session\n\nT = T"
},
{
"path": "backend/app/api/admin_routes/feedback.py",
"chars": 1018,
"preview": "from typing import Annotated, Optional\n\nfrom fastapi import APIRouter, Depends, Query\nfrom fastapi_pagination import Par"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/knowledge_base/chunk/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/knowledge_base/chunk/models.py",
"chars": 357,
"preview": "from pydantic import BaseModel\n\nfrom app.rag.retrievers.chunk.schema import VectorSearchRetrieverConfig\n\n\nclass KBChunkR"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/chunk/routes.py",
"chars": 1100,
"preview": "import logging\n\nfrom fastapi import APIRouter\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.retr"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/data_source/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/knowledge_base/data_source/models.py",
"chars": 703,
"preview": "from pydantic import BaseModel, field_validator\n\nfrom app.models import DataSourceType\n\n\nclass KBDataSource(BaseModel):\n"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/data_source/routes.py",
"chars": 4315,
"preview": "import logging\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Params, Page\n\nfrom app.api.admin_r"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/document/models.py",
"chars": 2291,
"preview": "from datetime import datetime\nfrom typing import Optional\nfrom uuid import UUID\n\nfrom pydantic import BaseModel, Field\n\n"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/document/routes.py",
"chars": 6968,
"preview": "import logging\nfrom typing import Annotated\n\nfrom fastapi import APIRouter, Depends, Query, HTTPException\nfrom fastapi_p"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/graph/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/knowledge_base/graph/knowledge/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/knowledge_base/graph/knowledge/routes.py",
"chars": 2678,
"preview": "from fastapi import HTTPException\nfrom starlette import status\n\nfrom app.api.admin_routes.knowledge_base.graph.models im"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/graph/models.py",
"chars": 1599,
"preview": "from typing import List, Optional\nfrom pydantic import BaseModel, model_validator\n\nfrom app.rag.retrievers.knowledge_gra"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/graph/routes.py",
"chars": 9521,
"preview": "import logging\nfrom typing import List\nimport json\n\nfrom fastapi import APIRouter, HTTPException, status\nfrom fastapi.re"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/models.py",
"chars": 3638,
"preview": "from datetime import datetime\nfrom typing import Optional\nfrom uuid import UUID\nfrom pydantic import BaseModel, field_va"
},
{
"path": "backend/app/api/admin_routes/knowledge_base/routes.py",
"chars": 8490,
"preview": "import logging\n\nfrom fastapi import APIRouter, Depends, HTTPException\nfrom fastapi_pagination import Params, Page\nfrom a"
},
{
"path": "backend/app/api/admin_routes/langfuse.py",
"chars": 1045,
"preview": "import logging\nfrom pydantic import BaseModel\n\nfrom fastapi import APIRouter\nfrom langfuse import Langfuse\n\nfrom app.api"
},
{
"path": "backend/app/api/admin_routes/legacy_retrieve.py",
"chars": 4477,
"preview": "import logging\nfrom typing import Optional, List\n\nfrom fastapi import APIRouter\nfrom sqlmodel import Session\nfrom app.mo"
},
{
"path": "backend/app/api/admin_routes/llm/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/llm/routes.py",
"chars": 3038,
"preview": "from typing import List\n\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\nfrom llama_i"
},
{
"path": "backend/app/api/admin_routes/models.py",
"chars": 1006,
"preview": "from uuid import UUID\nfrom typing import Optional\nfrom pydantic import BaseModel\n\nfrom app.api.admin_routes.embedding_mo"
},
{
"path": "backend/app/api/admin_routes/reranker_model/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/admin_routes/reranker_model/routes.py",
"chars": 4337,
"preview": "from typing import List\n\nfrom fastapi import Depends, APIRouter\nfrom fastapi_pagination import Params, Page\nfrom pydanti"
},
{
"path": "backend/app/api/admin_routes/semantic_cache.py",
"chars": 2026,
"preview": "from typing import Optional, Dict\nimport time\nimport logging\n\nfrom fastapi import APIRouter, Body\nfrom app.api.deps impo"
},
{
"path": "backend/app/api/admin_routes/site_setting.py",
"chars": 1947,
"preview": "from typing import Dict\nfrom pydantic import BaseModel\nfrom http import HTTPStatus\nfrom fastapi import APIRouter, HTTPEx"
},
{
"path": "backend/app/api/admin_routes/stats.py",
"chars": 974,
"preview": "from datetime import date\nfrom pydantic import BaseModel\nfrom fastapi import APIRouter\nfrom app.api.deps import CurrentS"
},
{
"path": "backend/app/api/admin_routes/upload.py",
"chars": 2384,
"preview": "import os\nimport time\nfrom typing import List\nfrom fastapi import APIRouter, UploadFile, HTTPException, status\n\nfrom app"
},
{
"path": "backend/app/api/admin_routes/user.py",
"chars": 610,
"preview": "from typing import Optional\nfrom fastapi import APIRouter, Depends\nfrom fastapi_pagination import Page, Params\n\nfrom app"
},
{
"path": "backend/app/api/deps.py",
"chars": 833,
"preview": "from typing import Annotated\nfrom fastapi import Depends\nfrom sqlmodel import Session\nfrom sqlmodel.ext.asyncio.session "
},
{
"path": "backend/app/api/main.py",
"chars": 4228,
"preview": "from fastapi import APIRouter\nfrom app.api.routes import (\n chat_engine,\n index,\n chat,\n user,\n api_key,\n"
},
{
"path": "backend/app/api/routes/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/routes/api_key.py",
"chars": 1189,
"preview": "from fastapi import APIRouter, Depends\nfrom pydantic import BaseModel\nfrom fastapi_pagination import Params, Page\n\nfrom "
},
{
"path": "backend/app/api/routes/chat.py",
"chars": 7087,
"preview": "import logging\nfrom uuid import UUID\nfrom typing import List, Optional, Annotated\nfrom http import HTTPStatus\n\nfrom pyda"
},
{
"path": "backend/app/api/routes/chat_engine.py",
"chars": 1166,
"preview": "import logging\n\nfrom fastapi import APIRouter, Depends\nfrom app.api.deps import SessionDep\nfrom fastapi_pagination impor"
},
{
"path": "backend/app/api/routes/document.py",
"chars": 851,
"preview": "from fastapi import HTTPException, APIRouter\nfrom fastapi.responses import StreamingResponse\nfrom app.api.deps import Se"
},
{
"path": "backend/app/api/routes/feedback.py",
"chars": 1178,
"preview": "from fastapi import APIRouter, HTTPException, Header\nfrom http import HTTPStatus\nfrom pydantic import BaseModel\n\nfrom ap"
},
{
"path": "backend/app/api/routes/index.py",
"chars": 1115,
"preview": "from fastapi import APIRouter\nfrom sqlmodel import text\n\nfrom app.api.deps import SessionDep\nfrom app.api.routes.models "
},
{
"path": "backend/app/api/routes/models.py",
"chars": 517,
"preview": "from pydantic import BaseModel\n\n\nclass RequiredConfigStatus(BaseModel):\n default_llm: bool\n default_embedding_mode"
},
{
"path": "backend/app/api/routes/retrieve/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/api/routes/retrieve/models.py",
"chars": 841,
"preview": "from typing import Optional\n\nfrom pydantic import BaseModel\n\nfrom app.rag.retrievers.knowledge_graph.schema import (\n "
},
{
"path": "backend/app/api/routes/retrieve/routes.py",
"chars": 2205,
"preview": "import logging\n\nfrom fastapi import APIRouter\nfrom app.api.deps import SessionDep, CurrentSuperuserDep\nfrom app.rag.retr"
},
{
"path": "backend/app/api/routes/user.py",
"chars": 229,
"preview": "from fastapi import APIRouter\n\nfrom app.api.deps import CurrentUserDep\nfrom app.auth.schemas import UserRead\n\nrouter = A"
},
{
"path": "backend/app/api_server.py",
"chars": 2021,
"preview": "import app.logger\nimport sentry_sdk\n\nfrom dotenv import load_dotenv\nfrom contextlib import asynccontextmanager\nfrom fast"
},
{
"path": "backend/app/auth/api_keys.py",
"chars": 4363,
"preview": "import base64\nimport string\nimport secrets\nimport hashlib\nfrom typing import Optional, Tuple\n\nfrom fastapi import Reques"
},
{
"path": "backend/app/auth/db.py",
"chars": 608,
"preview": "from fastapi import Depends\nfrom fastapi_users_db_sqlmodel import SQLModelUserDatabaseAsync\nfrom fastapi_users_db_sqlmod"
},
{
"path": "backend/app/auth/schemas.py",
"chars": 209,
"preview": "import uuid\n\nfrom fastapi_users import schemas\n\n\nclass UserRead(schemas.BaseUser[uuid.UUID]):\n pass\n\n\nclass UserCreat"
},
{
"path": "backend/app/auth/users.py",
"chars": 6128,
"preview": "import logging\nimport uuid\nimport contextlib\nfrom http import HTTPStatus\nfrom typing import Optional\n\nfrom fastapi impor"
},
{
"path": "backend/app/celery.py",
"chars": 473,
"preview": "from celery import Celery\n\nfrom app.core.config import settings\n\n\napp = Celery(\n settings.PROJECT_NAME,\n broker=se"
},
{
"path": "backend/app/core/config.py",
"chars": 4913,
"preview": "import enum\nfrom typing import Annotated, Any\nfrom urllib.parse import quote\n\nfrom pydantic import (\n AnyUrl,\n Bef"
},
{
"path": "backend/app/core/db.py",
"chars": 2219,
"preview": "import ssl\nimport contextlib\nfrom typing import AsyncGenerator, Generator\n\nfrom sqlmodel import create_engine, Session\nf"
},
{
"path": "backend/app/evaluation/evals.py",
"chars": 12909,
"preview": "import logging\nimport os\n\nimport requests\nimport typing\nimport uuid\nimport json\nfrom tqdm import tqdm\nfrom datetime impo"
},
{
"path": "backend/app/evaluation/evaluators/__init__.py",
"chars": 209,
"preview": "from .language_detector import LanguageEvaluator\nfrom .toxicity import ToxicityEvaluator\nfrom .e2e_rag_evaluator import "
},
{
"path": "backend/app/evaluation/evaluators/e2e_rag_evaluator.py",
"chars": 3050,
"preview": "import time\nfrom typing import Optional, Sequence, Mapping\nfrom llama_index.core.evaluation.base import EvaluationResult"
},
{
"path": "backend/app/evaluation/evaluators/language_detector.py",
"chars": 3797,
"preview": "import asyncio\nimport logging\nfrom typing import Any, Optional, Sequence, Union, cast\n\nfrom llama_index.core import Serv"
},
{
"path": "backend/app/evaluation/evaluators/toxicity.py",
"chars": 4224,
"preview": "import asyncio\nfrom typing import Any, Callable, Optional, Sequence, Tuple, Union\n\nfrom llama_index.core.evaluation.base"
},
{
"path": "backend/app/exceptions.py",
"chars": 3984,
"preview": "from http import HTTPStatus\nfrom uuid import UUID\n\nfrom fastapi import HTTPException\n\n# Common\n\n\nclass InternalServerErr"
},
{
"path": "backend/app/experiments/sql_extraction.py",
"chars": 2490,
"preview": "import dspy\nfrom dspy.functional import TypedPredictor\nimport logging\nfrom typing import Optional\n\nfrom app.experiments."
},
{
"path": "backend/app/experiments/sql_sample_gen.py",
"chars": 3210,
"preview": "import dspy\nfrom dspy.functional import TypedPredictor\nimport logging\nfrom pydantic import BaseModel, Field\nfrom typing "
},
{
"path": "backend/app/file_storage/__init__.py",
"chars": 181,
"preview": "from .base import FileStorage\nfrom .local import LocalFileStorage\n\n\ndef get_file_storage() -> FileStorage:\n return Lo"
},
{
"path": "backend/app/file_storage/base.py",
"chars": 586,
"preview": "from typing import IO\n\nfrom abc import ABC, abstractmethod\n\n\nclass FileStorage(ABC):\n @abstractmethod\n def open(se"
},
{
"path": "backend/app/file_storage/local.py",
"chars": 827,
"preview": "import os\nfrom typing import IO\n\nfrom app.file_storage.base import FileStorage\nfrom app.core.config import settings\n\n\ncl"
},
{
"path": "backend/app/logger.py",
"chars": 1192,
"preview": "import logging\nfrom logging.config import dictConfig\nfrom app.core.config import settings\n\nlogger = logging.getLogger(\"a"
},
{
"path": "backend/app/models/__init__.py",
"chars": 1264,
"preview": "# flake8: noqa\nfrom .entity import (\n EntityType,\n EntityPublic,\n get_kb_entity_model,\n)\nfrom .relationship imp"
},
{
"path": "backend/app/models/api_key.py",
"chars": 842,
"preview": "from uuid import UUID\nfrom typing import Optional\n\nfrom sqlmodel import (\n Field,\n Relationship as SQLRelationship"
},
{
"path": "backend/app/models/auth.py",
"chars": 1173,
"preview": "from typing import Optional\nfrom uuid import UUID\nfrom datetime import datetime\n\nfrom pydantic import EmailStr\nfrom sqlm"
},
{
"path": "backend/app/models/base.py",
"chars": 2606,
"preview": "import json\nfrom uuid import UUID\nfrom datetime import datetime\nfrom typing import Optional\nfrom sqlmodel import Field, "
},
{
"path": "backend/app/models/chat.py",
"chars": 2106,
"preview": "import enum\nfrom uuid import UUID\nfrom typing import Optional, Dict\nfrom pydantic import BaseModel\nfrom datetime import "
},
{
"path": "backend/app/models/chat_engine.py",
"chars": 1671,
"preview": "from typing import Optional, Dict\nfrom pydantic import BaseModel\nfrom datetime import datetime\n\nfrom sqlmodel import (\n "
},
{
"path": "backend/app/models/chat_message.py",
"chars": 1760,
"preview": "from uuid import UUID\nfrom typing import Optional, List\nfrom datetime import datetime\n\nfrom sqlmodel import (\n Field,"
},
{
"path": "backend/app/models/chunk.py",
"chars": 3060,
"preview": "import enum\nfrom app.utils.singleflight_cache import singleflight_cache\n\nfrom typing import Optional, Type\nfrom sqlmodel"
},
{
"path": "backend/app/models/data_source.py",
"chars": 1429,
"preview": "from enum import Enum\nfrom uuid import UUID\nfrom typing import Optional\nfrom datetime import datetime\n\nfrom sqlmodel imp"
},
{
"path": "backend/app/models/document.py",
"chars": 2457,
"preview": "import enum\nfrom typing import Optional\nfrom datetime import datetime\n\nfrom llama_index.core.schema import Document as L"
},
{
"path": "backend/app/models/embed_model.py",
"chars": 822,
"preview": "from typing import Optional, Any\n\nfrom sqlmodel import Field, Column, JSON, String\n\nfrom .base import UpdatableBaseModel"
},
{
"path": "backend/app/models/entity.py",
"chars": 2950,
"preview": "import enum\nfrom app.utils.singleflight_cache import singleflight_cache\nfrom typing import Optional, List, Dict, Type\n\nf"
},
{
"path": "backend/app/models/evaluation_dataset.py",
"chars": 1618,
"preview": "from uuid import UUID\nfrom typing import Optional, List\n\nfrom sqlalchemy import Text, JSON\n\nfrom sqlmodel import (\n F"
},
{
"path": "backend/app/models/evaluation_task.py",
"chars": 2164,
"preview": "import enum\nfrom uuid import UUID\nfrom typing import Optional, List\n\nfrom sqlalchemy import Text, JSON\n\nfrom sqlmodel im"
},
{
"path": "backend/app/models/feedback.py",
"chars": 2239,
"preview": "import enum\nfrom uuid import UUID\nfrom typing import Optional\nfrom pydantic import BaseModel\nfrom datetime import dateti"
},
{
"path": "backend/app/models/knowledge_base.py",
"chars": 6386,
"preview": "import enum\nfrom datetime import datetime\nfrom typing import Dict, Optional, Union\nfrom uuid import UUID\n\nfrom pydantic "
},
{
"path": "backend/app/models/knowledge_base_scoped/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/models/knowledge_base_scoped/table_naming.py",
"chars": 1695,
"preview": "import logging\nimport re\n\nfrom app.models.knowledge_base import KnowledgeBase\nfrom app.models.embed_model import DEFAULT"
},
{
"path": "backend/app/models/llm.py",
"chars": 913,
"preview": "from typing import Optional, Any\nfrom sqlmodel import Field, Column, JSON, String\nfrom pydantic import BaseModel\nfrom ap"
},
{
"path": "backend/app/models/recommend_question.py",
"chars": 699,
"preview": "from typing import Optional, List\n\nfrom sqlmodel import (\n Field,\n Column,\n JSON,\n Relationship as SQLRelati"
},
{
"path": "backend/app/models/relationship.py",
"chars": 3959,
"preview": "from datetime import datetime\nfrom app.utils.singleflight_cache import singleflight_cache\nfrom typing import Optional, T"
},
{
"path": "backend/app/models/reranker_model.py",
"chars": 1075,
"preview": "from typing import Optional, Any\n\nfrom sqlmodel import Field, Column, JSON, String\n\nfrom .base import UpdatableBaseModel"
},
{
"path": "backend/app/models/semantic_cache.py",
"chars": 1581,
"preview": "from typing import Optional, Any\nfrom datetime import datetime\n\nfrom sqlmodel import (\n SQLModel,\n Field,\n Colu"
},
{
"path": "backend/app/models/site_setting.py",
"chars": 1004,
"preview": "from typing import Optional\nfrom datetime import datetime\n\nfrom sqlmodel import SQLModel, Field, Column, JSON, func\nfrom"
},
{
"path": "backend/app/models/staff_action_log.py",
"chars": 567,
"preview": "from typing import Optional, Dict\nfrom datetime import datetime\n\nfrom sqlmodel import SQLModel, Field, Column, JSON, Dat"
},
{
"path": "backend/app/models/upload.py",
"chars": 808,
"preview": "from uuid import UUID\nfrom typing import Optional\n\nfrom sqlmodel import (\n Field,\n Column,\n String,\n Relatio"
},
{
"path": "backend/app/rag/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/build_index.py",
"chars": 5925,
"preview": "import logging\nfrom typing import List, Optional, Type\n\nfrom llama_index.core import VectorStoreIndex\nfrom llama_index.c"
},
{
"path": "backend/app/rag/chat/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/chat/chat_flow.py",
"chars": 32756,
"preview": "import json\nimport logging\nfrom datetime import datetime, UTC\nfrom typing import List, Optional, Generator, Tuple, Any\nf"
},
{
"path": "backend/app/rag/chat/chat_service.py",
"chars": 11032,
"preview": "from http import HTTPStatus\nimport logging\n\nfrom typing import Generator, List, Optional\nfrom uuid import UUID\n\nfrom fas"
},
{
"path": "backend/app/rag/chat/config.py",
"chars": 7309,
"preview": "import logging\nimport dspy\n\nfrom typing import Optional, List\nfrom pydantic import BaseModel, Field\nfrom sqlmodel import"
},
{
"path": "backend/app/rag/chat/retrieve/retrieve_flow.py",
"chars": 6276,
"preview": "import logging\nfrom datetime import datetime\nfrom typing import List, Optional, Tuple\n\nfrom llama_index.core.instrumenta"
},
{
"path": "backend/app/rag/chat/stream_protocol.py",
"chars": 1761,
"preview": "import json\nfrom dataclasses import dataclass\n\nfrom pydantic import BaseModel\n\nfrom app.models import ChatMessage, Chat\n"
},
{
"path": "backend/app/rag/datasource/__init__.py",
"chars": 972,
"preview": "from sqlmodel import Session\nfrom typing import Any\nfrom uuid import UUID\n\nfrom app.models import DataSourceType\nfrom .b"
},
{
"path": "backend/app/rag/datasource/base.py",
"chars": 902,
"preview": "from abc import ABC, abstractmethod\nfrom uuid import UUID\nfrom typing import Generator, Any\nfrom sqlmodel import Session"
},
{
"path": "backend/app/rag/datasource/consts.py",
"chars": 213,
"preview": "IGNORE_TAGS = [\n \"noscript\",\n \"title\",\n \"script\",\n \"style\",\n \"meta\",\n \"head\",\n \"header\",\n \"foote"
},
{
"path": "backend/app/rag/datasource/file.py",
"chars": 3458,
"preview": "import logging\nimport docx\nimport pptx\nimport openpyxl\nfrom pydantic import BaseModel\nfrom typing import Generator, IO\nf"
},
{
"path": "backend/app/rag/datasource/web_base.py",
"chars": 1830,
"preview": "import logging\nfrom datetime import datetime, UTC\nfrom typing import Generator\nfrom playwright.sync_api import sync_play"
},
{
"path": "backend/app/rag/datasource/web_single_page.py",
"chars": 801,
"preview": "import logging\nfrom pydantic import BaseModel\nfrom typing import Generator, List\n\nfrom app.models import Document\nfrom a"
},
{
"path": "backend/app/rag/datasource/web_sitemap.py",
"chars": 1480,
"preview": "import logging\nfrom typing import Generator\nfrom urllib.parse import urlparse, urljoin\n\nimport requests\nfrom pydantic im"
},
{
"path": "backend/app/rag/default_prompt.py",
"chars": 13451,
"preview": "DEFAULT_INTENT_GRAPH_KNOWLEDGE = \"\"\"\\\nGiven a list of prerequisite questions and their relevant knowledge for the user's"
},
{
"path": "backend/app/rag/embeddings/local/local_embedding.py",
"chars": 3967,
"preview": "\"\"\"Jina embeddings file.\"\"\"\n\nfrom typing import Any, List, Optional\nimport requests\n\nfrom llama_index.core.base.embeddin"
},
{
"path": "backend/app/rag/embeddings/open_like/openai_like_embedding.py",
"chars": 3258,
"preview": "from typing import Any, List, Optional\n\nfrom llama_index.core.base.embeddings.base import DEFAULT_EMBED_BATCH_SIZE\nfrom "
},
{
"path": "backend/app/rag/embeddings/provider.py",
"chars": 8138,
"preview": "import enum\n\nfrom typing import List\nfrom pydantic import BaseModel\n\n\nclass EmbeddingProvider(str, enum.Enum):\n OPENA"
},
{
"path": "backend/app/rag/embeddings/resolver.py",
"chars": 3506,
"preview": "from typing import Optional\n\nfrom llama_index.embeddings.azure_openai import AzureOpenAIEmbedding\nfrom sqlmodel import S"
},
{
"path": "backend/app/rag/indices/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/indices/knowledge_graph/__init__.py",
"chars": 73,
"preview": "from .base import KnowledgeGraphIndex\n\n__all__ = [\"KnowledgeGraphIndex\"]\n"
},
{
"path": "backend/app/rag/indices/knowledge_graph/base.py",
"chars": 4568,
"preview": "import dspy\nimport logging\n\nfrom typing import Any, Dict, List, Optional, Sequence\nfrom llama_index.core.data_structs im"
},
{
"path": "backend/app/rag/indices/knowledge_graph/extractor.py",
"chars": 10543,
"preview": "import logging\nfrom copy import deepcopy\nimport pandas as pd\nimport dspy\nfrom typing import Mapping, Optional, List\n\nfro"
},
{
"path": "backend/app/rag/indices/knowledge_graph/graph_store/__init__.py",
"chars": 229,
"preview": "from .tidb_graph_store import TiDBGraphStore\nfrom .tidb_graph_editor import TiDBGraphEditor\nfrom .tidb_graph_store impor"
},
{
"path": "backend/app/rag/indices/knowledge_graph/graph_store/helpers.py",
"chars": 3732,
"preview": "import json\nfrom typing import List, Tuple, Mapping, Any\n\nfrom llama_index.embeddings.openai import OpenAIEmbedding, Ope"
},
{
"path": "backend/app/rag/indices/knowledge_graph/graph_store/schema.py",
"chars": 703,
"preview": "from abc import ABC, abstractmethod\nfrom typing import Dict, Optional, Tuple\n\nfrom sqlmodel import Session\n\n\nclass Knowl"
},
{
"path": "backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_editor.py",
"chars": 8464,
"preview": "from typing import Optional, Tuple, List, Type\n\nfrom llama_index.core.embeddings import resolve_embed_model\nfrom llama_i"
},
{
"path": "backend/app/rag/indices/knowledge_graph/graph_store/tidb_graph_store.py",
"chars": 48013,
"preview": "import dspy\nimport logging\nimport numpy as np\nimport tidb_vector\nfrom deepdiff import DeepDiff\nfrom typing import List, "
},
{
"path": "backend/app/rag/indices/knowledge_graph/schema.py",
"chars": 4404,
"preview": "from pydantic import BaseModel, Field\nfrom typing import Mapping, Any, List\n\n\nclass Entity(BaseModel):\n \"\"\"List of en"
},
{
"path": "backend/app/rag/indices/vector_search/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/indices/vector_search/vector_store/__init__.py",
"chars": 30,
"preview": "__all__ = [\"TiDBVectorStore\"]\n"
},
{
"path": "backend/app/rag/indices/vector_search/vector_store/tidb_vector_store.py",
"chars": 8820,
"preview": "import logging\nimport tidb_vector\nimport sqlalchemy\n\nfrom typing import Any, List, Optional, Type\nfrom llama_index.core."
},
{
"path": "backend/app/rag/knowledge_base/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/knowledge_base/config.py",
"chars": 1187,
"preview": "import logging\n\nfrom llama_index.core.base.embeddings.base import BaseEmbedding\nfrom sqlmodel import Session\n\nfrom app.m"
},
{
"path": "backend/app/rag/knowledge_base/index_store.py",
"chars": 2241,
"preview": "from sqlalchemy import inspection\nfrom sqlmodel import Session\n\nfrom app.models import KnowledgeBase\nfrom app.models.chu"
},
{
"path": "backend/app/rag/knowledge_base/schema.py",
"chars": 130,
"preview": "from enum import Enum\n\n\nclass KBIndexType(str, Enum):\n VECTOR_SEARCH = \"VECTOR_SEARCH\"\n KNOWLEDGE_GRAPH = \"KNOWLED"
},
{
"path": "backend/app/rag/llms/dspy.py",
"chars": 3397,
"preview": "import dspy\n\nfrom llama_index.core.base.llms.base import BaseLLM\n\n\ndef get_dspy_lm_by_llama_llm(llama_llm: BaseLLM) -> d"
},
{
"path": "backend/app/rag/llms/provider.py",
"chars": 7936,
"preview": "import enum\n\nfrom typing import List\nfrom pydantic import BaseModel\n\n\nclass LLMProvider(str, enum.Enum):\n OPENAI = \"o"
},
{
"path": "backend/app/rag/llms/resolver.py",
"chars": 5027,
"preview": "from typing import Optional\n\nfrom llama_index.core.llms.llm import LLM\nfrom sqlmodel import Session\n\nfrom app.repositori"
},
{
"path": "backend/app/rag/node_parser/__init__.py",
"chars": 80,
"preview": "from .file.markdown import MarkdownNodeParser\n\n__all__ = [\"MarkdownNodeParser\"]\n"
},
{
"path": "backend/app/rag/node_parser/file/markdown.py",
"chars": 10791,
"preview": "import re\nfrom typing import Any, Dict, List, Optional, Sequence, Callable\n\nfrom llama_index.core.callbacks.base import "
},
{
"path": "backend/app/rag/postprocessors/__init__.py",
"chars": 134,
"preview": "from .metadata_post_filter import MetadataPostFilter, MetadataFilters\n\n__all__ = [\n \"MetadataPostFilter\",\n \"Metada"
},
{
"path": "backend/app/rag/postprocessors/metadata_post_filter.py",
"chars": 2678,
"preview": "import logging\n\nfrom typing import Dict, List, Optional, Any, Union\nfrom llama_index.core import QueryBundle\nfrom llama_"
},
{
"path": "backend/app/rag/query_dispatcher.py",
"chars": 2201,
"preview": "import logging\nfrom typing import Optional, Sequence\nfrom llama_index.llms.openai import OpenAI\nfrom llama_index.core.to"
},
{
"path": "backend/app/rag/question_gen/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "backend/app/rag/question_gen/helpers.py",
"chars": 694,
"preview": "from typing import List\n\nfrom llama_index.core import QueryBundle\nfrom llama_index.core.base.llms.types import ChatMessa"
}
]
// ... and 691 more files (download for full content)
About this extraction
This page contains the full source code of the pingcap/tidb.ai GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 891 files (2.1 MB), approximately 609.2k tokens, and a symbol index with 2442 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.