Copy disabled (too large)
Download .txt
Showing preview only (13,319K chars total). Download the full file to get everything.
Repository: langchain-ai/langchain
Branch: master
Commit: 86238a775edc
Files: 2745
Total size: 12.1 MB
Directory structure:
gitextract_cpd1yxs9/
├── .devcontainer/
│ ├── README.md
│ ├── devcontainer.json
│ └── docker-compose.yaml
├── .dockerignore
├── .editorconfig
├── .gitattributes
├── .github/
│ ├── CODEOWNERS
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug-report.yml
│ │ ├── config.yml
│ │ ├── feature-request.yml
│ │ ├── privileged.yml
│ │ └── task.yml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── actions/
│ │ └── uv_setup/
│ │ └── action.yml
│ ├── dependabot.yml
│ ├── scripts/
│ │ ├── check_diff.py
│ │ ├── check_prerelease_dependencies.py
│ │ ├── get_min_versions.py
│ │ ├── pr-labeler-config.json
│ │ └── pr-labeler.js
│ ├── tools/
│ │ └── git-restore-mtime
│ └── workflows/
│ ├── _compile_integration_test.yml
│ ├── _lint.yml
│ ├── _refresh_model_profiles.yml
│ ├── _release.yml
│ ├── _test.yml
│ ├── _test_pydantic.yml
│ ├── auto-label-by-package.yml
│ ├── check_agents_sync.yml
│ ├── check_core_versions.yml
│ ├── check_diffs.yml
│ ├── close_unchecked_issues.yml
│ ├── codspeed.yml
│ ├── integration_tests.yml
│ ├── pr_labeler.yml
│ ├── pr_labeler_backfill.yml
│ ├── pr_lint.yml
│ ├── refresh_model_profiles.yml
│ ├── reopen_on_assignment.yml
│ ├── require_issue_link.yml
│ ├── tag-external-issues.yml
│ └── v03_api_doc_build.yml
├── .gitignore
├── .markdownlint.json
├── .mcp.json
├── .pre-commit-config.yaml
├── .vscode/
│ ├── extensions.json
│ └── settings.json
├── AGENTS.md
├── CITATION.cff
├── CLAUDE.md
├── LICENSE
├── README.md
└── libs/
├── Makefile
├── README.md
├── core/
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain_core/
│ │ ├── __init__.py
│ │ ├── _api/
│ │ │ ├── __init__.py
│ │ │ ├── beta_decorator.py
│ │ │ ├── deprecation.py
│ │ │ ├── internal.py
│ │ │ └── path.py
│ │ ├── _import_utils.py
│ │ ├── _security/
│ │ │ ├── __init__.py
│ │ │ └── _ssrf_protection.py
│ │ ├── agents.py
│ │ ├── caches.py
│ │ ├── callbacks/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── file.py
│ │ │ ├── manager.py
│ │ │ ├── stdout.py
│ │ │ ├── streaming_stdout.py
│ │ │ └── usage.py
│ │ ├── chat_history.py
│ │ ├── chat_loaders.py
│ │ ├── chat_sessions.py
│ │ ├── cross_encoders.py
│ │ ├── document_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── blob_loaders.py
│ │ │ └── langsmith.py
│ │ ├── documents/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── compressor.py
│ │ │ └── transformers.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings.py
│ │ │ └── fake.py
│ │ ├── env.py
│ │ ├── example_selectors/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── length_based.py
│ │ │ └── semantic_similarity.py
│ │ ├── exceptions.py
│ │ ├── globals.py
│ │ ├── indexing/
│ │ │ ├── __init__.py
│ │ │ ├── api.py
│ │ │ ├── base.py
│ │ │ └── in_memory.py
│ │ ├── language_models/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── base.py
│ │ │ ├── chat_models.py
│ │ │ ├── fake.py
│ │ │ ├── fake_chat_models.py
│ │ │ ├── llms.py
│ │ │ └── model_profile.py
│ │ ├── load/
│ │ │ ├── __init__.py
│ │ │ ├── _validation.py
│ │ │ ├── dump.py
│ │ │ ├── load.py
│ │ │ ├── mapping.py
│ │ │ └── serializable.py
│ │ ├── messages/
│ │ │ ├── __init__.py
│ │ │ ├── ai.py
│ │ │ ├── base.py
│ │ │ ├── block_translators/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── anthropic.py
│ │ │ │ ├── bedrock.py
│ │ │ │ ├── bedrock_converse.py
│ │ │ │ ├── google_genai.py
│ │ │ │ ├── google_vertexai.py
│ │ │ │ ├── groq.py
│ │ │ │ ├── langchain_v0.py
│ │ │ │ └── openai.py
│ │ │ ├── chat.py
│ │ │ ├── content.py
│ │ │ ├── function.py
│ │ │ ├── human.py
│ │ │ ├── modifier.py
│ │ │ ├── system.py
│ │ │ ├── tool.py
│ │ │ └── utils.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── format_instructions.py
│ │ │ ├── json.py
│ │ │ ├── list.py
│ │ │ ├── openai_functions.py
│ │ │ ├── openai_tools.py
│ │ │ ├── pydantic.py
│ │ │ ├── string.py
│ │ │ ├── transform.py
│ │ │ └── xml.py
│ │ ├── outputs/
│ │ │ ├── __init__.py
│ │ │ ├── chat_generation.py
│ │ │ ├── chat_result.py
│ │ │ ├── generation.py
│ │ │ ├── llm_result.py
│ │ │ └── run_info.py
│ │ ├── prompt_values.py
│ │ ├── prompts/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── dict.py
│ │ │ ├── few_shot.py
│ │ │ ├── few_shot_with_templates.py
│ │ │ ├── image.py
│ │ │ ├── loading.py
│ │ │ ├── message.py
│ │ │ ├── prompt.py
│ │ │ ├── string.py
│ │ │ └── structured.py
│ │ ├── py.typed
│ │ ├── rate_limiters.py
│ │ ├── retrievers.py
│ │ ├── runnables/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── branch.py
│ │ │ ├── config.py
│ │ │ ├── configurable.py
│ │ │ ├── fallbacks.py
│ │ │ ├── graph.py
│ │ │ ├── graph_ascii.py
│ │ │ ├── graph_mermaid.py
│ │ │ ├── graph_png.py
│ │ │ ├── history.py
│ │ │ ├── passthrough.py
│ │ │ ├── retry.py
│ │ │ ├── router.py
│ │ │ ├── schema.py
│ │ │ └── utils.py
│ │ ├── stores.py
│ │ ├── structured_query.py
│ │ ├── sys_info.py
│ │ ├── tools/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── convert.py
│ │ │ ├── render.py
│ │ │ ├── retriever.py
│ │ │ ├── simple.py
│ │ │ └── structured.py
│ │ ├── tracers/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── _streaming.py
│ │ │ ├── base.py
│ │ │ ├── context.py
│ │ │ ├── core.py
│ │ │ ├── evaluation.py
│ │ │ ├── event_stream.py
│ │ │ ├── langchain.py
│ │ │ ├── log_stream.py
│ │ │ ├── memory_stream.py
│ │ │ ├── root_listeners.py
│ │ │ ├── run_collector.py
│ │ │ ├── schemas.py
│ │ │ └── stdout.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── _merge.py
│ │ │ ├── aiter.py
│ │ │ ├── env.py
│ │ │ ├── formatting.py
│ │ │ ├── function_calling.py
│ │ │ ├── html.py
│ │ │ ├── image.py
│ │ │ ├── input.py
│ │ │ ├── interactive_env.py
│ │ │ ├── iter.py
│ │ │ ├── json.py
│ │ │ ├── json_schema.py
│ │ │ ├── mustache.py
│ │ │ ├── pydantic.py
│ │ │ ├── strings.py
│ │ │ ├── usage.py
│ │ │ ├── utils.py
│ │ │ └── uuid.py
│ │ ├── vectorstores/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── in_memory.py
│ │ │ └── utils.py
│ │ └── version.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ ├── check_version.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── benchmarks/
│ │ ├── __init__.py
│ │ ├── test_async_callbacks.py
│ │ └── test_imports.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── _api/
│ │ ├── __init__.py
│ │ ├── test_beta_decorator.py
│ │ ├── test_deprecation.py
│ │ ├── test_imports.py
│ │ └── test_path.py
│ ├── caches/
│ │ ├── __init__.py
│ │ └── test_in_memory_cache.py
│ ├── callbacks/
│ │ ├── __init__.py
│ │ ├── test_async_callback_manager.py
│ │ ├── test_dispatch_custom_event.py
│ │ ├── test_handle_event.py
│ │ ├── test_imports.py
│ │ ├── test_sync_callback_manager.py
│ │ └── test_usage_callback.py
│ ├── chat_history/
│ │ ├── __init__.py
│ │ └── test_chat_history.py
│ ├── conftest.py
│ ├── data/
│ │ ├── prompt_file.txt
│ │ └── prompts/
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ └── simple_prompt.json
│ ├── dependencies/
│ │ ├── __init__.py
│ │ └── test_dependencies.py
│ ├── document_loaders/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_langsmith.py
│ ├── documents/
│ │ ├── __init__.py
│ │ ├── test_document.py
│ │ ├── test_imports.py
│ │ └── test_str.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ └── test_deterministic_embedding.py
│ ├── example_selectors/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_imports.py
│ │ ├── test_length_based_example_selector.py
│ │ └── test_similarity.py
│ ├── examples/
│ │ ├── example-non-utf8.csv
│ │ ├── example-non-utf8.txt
│ │ ├── example-utf8.csv
│ │ ├── example-utf8.txt
│ │ ├── example_prompt.json
│ │ ├── examples.json
│ │ ├── examples.yaml
│ │ ├── few_shot_prompt.json
│ │ ├── few_shot_prompt.yaml
│ │ ├── few_shot_prompt_example_prompt.json
│ │ ├── few_shot_prompt_examples_in.json
│ │ ├── few_shot_prompt_yaml_examples.yaml
│ │ ├── jinja_injection_prompt.json
│ │ ├── jinja_injection_prompt.yaml
│ │ ├── prompt_with_output_parser.json
│ │ ├── simple_prompt.json
│ │ ├── simple_prompt.yaml
│ │ ├── simple_prompt_with_template_file.json
│ │ └── simple_template.txt
│ ├── fake/
│ │ ├── __init__.py
│ │ ├── callbacks.py
│ │ └── test_fake_chat_model.py
│ ├── indexing/
│ │ ├── __init__.py
│ │ ├── test_hashed_document.py
│ │ ├── test_in_memory_indexer.py
│ │ ├── test_in_memory_record_manager.py
│ │ ├── test_indexing.py
│ │ └── test_public_api.py
│ ├── language_models/
│ │ ├── __init__.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_benchmark.py
│ │ │ ├── test_cache.py
│ │ │ └── test_rate_limiting.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ └── test_cache.py
│ │ ├── test_imports.py
│ │ └── test_model_profile.py
│ ├── load/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_secret_injection.py
│ │ └── test_serializable.py
│ ├── messages/
│ │ ├── __init__.py
│ │ ├── block_translators/
│ │ │ ├── __init__.py
│ │ │ ├── test_anthropic.py
│ │ │ ├── test_bedrock.py
│ │ │ ├── test_bedrock_converse.py
│ │ │ ├── test_google_genai.py
│ │ │ ├── test_groq.py
│ │ │ ├── test_langchain_v0.py
│ │ │ ├── test_openai.py
│ │ │ └── test_registration.py
│ │ ├── test_ai.py
│ │ ├── test_imports.py
│ │ └── test_utils.py
│ ├── output_parsers/
│ │ ├── __init__.py
│ │ ├── test_base_parsers.py
│ │ ├── test_imports.py
│ │ ├── test_json.py
│ │ ├── test_list_parser.py
│ │ ├── test_openai_functions.py
│ │ ├── test_openai_tools.py
│ │ ├── test_pydantic_parser.py
│ │ └── test_xml_parser.py
│ ├── outputs/
│ │ ├── __init__.py
│ │ ├── test_chat_generation.py
│ │ └── test_imports.py
│ ├── prompt_file.txt
│ ├── prompts/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_chat.ambr
│ │ │ └── test_prompt.ambr
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ ├── simple_prompt.json
│ │ ├── test_chat.py
│ │ ├── test_dict.py
│ │ ├── test_few_shot.py
│ │ ├── test_few_shot_with_templates.py
│ │ ├── test_image.py
│ │ ├── test_imports.py
│ │ ├── test_loading.py
│ │ ├── test_prompt.py
│ │ ├── test_string.py
│ │ ├── test_structured.py
│ │ └── test_utils.py
│ ├── pydantic_utils.py
│ ├── rate_limiters/
│ │ ├── __init__.py
│ │ └── test_in_memory_rate_limiter.py
│ ├── runnables/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_fallbacks.ambr
│ │ │ ├── test_graph.ambr
│ │ │ └── test_runnable.ambr
│ │ ├── test_concurrency.py
│ │ ├── test_config.py
│ │ ├── test_configurable.py
│ │ ├── test_fallbacks.py
│ │ ├── test_graph.py
│ │ ├── test_history.py
│ │ ├── test_imports.py
│ │ ├── test_runnable.py
│ │ ├── test_runnable_events_v1.py
│ │ ├── test_runnable_events_v2.py
│ │ ├── test_tracing_interops.py
│ │ └── test_utils.py
│ ├── stores/
│ │ ├── __init__.py
│ │ └── test_in_memory.py
│ ├── stubs.py
│ ├── test_globals.py
│ ├── test_imports.py
│ ├── test_messages.py
│ ├── test_outputs.py
│ ├── test_prompt_values.py
│ ├── test_pydantic_imports.py
│ ├── test_pydantic_serde.py
│ ├── test_retrievers.py
│ ├── test_setup.py
│ ├── test_ssrf_protection.py
│ ├── test_sys_info.py
│ ├── test_tools.py
│ ├── tracers/
│ │ ├── __init__.py
│ │ ├── test_async_base_tracer.py
│ │ ├── test_automatic_metadata.py
│ │ ├── test_base_tracer.py
│ │ ├── test_imports.py
│ │ ├── test_langchain.py
│ │ ├── test_memory_stream.py
│ │ ├── test_run_collector.py
│ │ └── test_schemas.py
│ ├── utils/
│ │ ├── __init__.py
│ │ ├── test_aiter.py
│ │ ├── test_env.py
│ │ ├── test_formatting.py
│ │ ├── test_function_calling.py
│ │ ├── test_html.py
│ │ ├── test_imports.py
│ │ ├── test_iter.py
│ │ ├── test_json_schema.py
│ │ ├── test_pydantic.py
│ │ ├── test_rm_titles.py
│ │ ├── test_strings.py
│ │ ├── test_usage.py
│ │ ├── test_utils.py
│ │ └── test_uuid_utils.py
│ └── vectorstores/
│ ├── __init__.py
│ ├── test_in_memory.py
│ ├── test_utils.py
│ └── test_vectorstore.py
├── langchain/
│ ├── .dockerignore
│ ├── .flake8
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── dev.Dockerfile
│ ├── extended_testing_deps.txt
│ ├── langchain_classic/
│ │ ├── __init__.py
│ │ ├── _api/
│ │ │ ├── __init__.py
│ │ │ ├── deprecation.py
│ │ │ ├── interactive_env.py
│ │ │ ├── module_import.py
│ │ │ └── path.py
│ │ ├── adapters/
│ │ │ ├── __init__.py
│ │ │ └── openai.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ ├── agent.py
│ │ │ ├── agent_iterator.py
│ │ │ ├── agent_toolkits/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ainetwork/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── amadeus/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── azure_cognitive_services.py
│ │ │ │ ├── base.py
│ │ │ │ ├── clickup/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── conversational_retrieval/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── openai_functions.py
│ │ │ │ │ └── tool.py
│ │ │ │ ├── csv/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── file_management/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── github/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── gitlab/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── gmail/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── jira/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── json/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── multion/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── nasa/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── nla/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── tool.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── office365/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── openapi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── planner.py
│ │ │ │ │ ├── planner_prompt.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ ├── spec.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── pandas/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── playwright/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── powerbi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── chat_base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── python/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── slack/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── spark/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── spark_sql/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── sql/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── steam/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── vectorstore/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── xorbits/
│ │ │ │ │ └── __init__.py
│ │ │ │ └── zapier/
│ │ │ │ ├── __init__.py
│ │ │ │ └── toolkit.py
│ │ │ ├── agent_types.py
│ │ │ ├── chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── format_scratchpad/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── log.py
│ │ │ │ ├── log_to_messages.py
│ │ │ │ ├── openai_functions.py
│ │ │ │ ├── openai_tools.py
│ │ │ │ ├── tools.py
│ │ │ │ └── xml.py
│ │ │ ├── initialize.py
│ │ │ ├── json_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── load_tools.py
│ │ │ ├── loading.py
│ │ │ ├── mrkl/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── openai_assistant/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── openai_functions_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── agent_token_buffer_memory.py
│ │ │ │ └── base.py
│ │ │ ├── openai_functions_multi_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── openai_tools/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── output_parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── json.py
│ │ │ │ ├── openai_functions.py
│ │ │ │ ├── openai_tools.py
│ │ │ │ ├── react_json_single_input.py
│ │ │ │ ├── react_single_input.py
│ │ │ │ ├── self_ask.py
│ │ │ │ ├── tools.py
│ │ │ │ └── xml.py
│ │ │ ├── react/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── agent.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ ├── textworld_prompt.py
│ │ │ │ └── wiki_prompt.py
│ │ │ ├── schema.py
│ │ │ ├── self_ask_with_search/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── structured_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── tool_calling_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── tools.py
│ │ │ ├── types.py
│ │ │ ├── utils.py
│ │ │ └── xml/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── prompt.py
│ │ ├── base_language.py
│ │ ├── base_memory.py
│ │ ├── cache.py
│ │ ├── callbacks/
│ │ │ ├── __init__.py
│ │ │ ├── aim_callback.py
│ │ │ ├── argilla_callback.py
│ │ │ ├── arize_callback.py
│ │ │ ├── arthur_callback.py
│ │ │ ├── base.py
│ │ │ ├── clearml_callback.py
│ │ │ ├── comet_ml_callback.py
│ │ │ ├── confident_callback.py
│ │ │ ├── context_callback.py
│ │ │ ├── file.py
│ │ │ ├── flyte_callback.py
│ │ │ ├── human.py
│ │ │ ├── infino_callback.py
│ │ │ ├── labelstudio_callback.py
│ │ │ ├── llmonitor_callback.py
│ │ │ ├── manager.py
│ │ │ ├── mlflow_callback.py
│ │ │ ├── openai_info.py
│ │ │ ├── promptlayer_callback.py
│ │ │ ├── sagemaker_callback.py
│ │ │ ├── stdout.py
│ │ │ ├── streaming_aiter.py
│ │ │ ├── streaming_aiter_final_only.py
│ │ │ ├── streaming_stdout.py
│ │ │ ├── streaming_stdout_final_only.py
│ │ │ ├── streamlit/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── mutable_expander.py
│ │ │ │ └── streamlit_callback_handler.py
│ │ │ ├── tracers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── comet.py
│ │ │ │ ├── evaluation.py
│ │ │ │ ├── langchain.py
│ │ │ │ ├── log_stream.py
│ │ │ │ ├── logging.py
│ │ │ │ ├── root_listeners.py
│ │ │ │ ├── run_collector.py
│ │ │ │ ├── schemas.py
│ │ │ │ ├── stdout.py
│ │ │ │ └── wandb.py
│ │ │ ├── trubrics_callback.py
│ │ │ ├── utils.py
│ │ │ ├── wandb_callback.py
│ │ │ └── whylabs_callback.py
│ │ ├── chains/
│ │ │ ├── __init__.py
│ │ │ ├── api/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── news_docs.py
│ │ │ │ ├── open_meteo_docs.py
│ │ │ │ ├── openapi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── chain.py
│ │ │ │ │ ├── prompts.py
│ │ │ │ │ ├── requests_chain.py
│ │ │ │ │ └── response_chain.py
│ │ │ │ ├── podcast_docs.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── tmdb_docs.py
│ │ │ ├── base.py
│ │ │ ├── chat_vector_db/
│ │ │ │ ├── __init__.py
│ │ │ │ └── prompts.py
│ │ │ ├── combine_documents/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── map_reduce.py
│ │ │ │ ├── map_rerank.py
│ │ │ │ ├── reduce.py
│ │ │ │ ├── refine.py
│ │ │ │ └── stuff.py
│ │ │ ├── constitutional_ai/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── models.py
│ │ │ │ ├── principles.py
│ │ │ │ └── prompts.py
│ │ │ ├── conversation/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── memory.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational_retrieval/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── elasticsearch_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── ernie_functions/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── example_generator.py
│ │ │ ├── flare/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── graph_qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── arangodb.py
│ │ │ │ ├── base.py
│ │ │ │ ├── cypher.py
│ │ │ │ ├── cypher_utils.py
│ │ │ │ ├── falkordb.py
│ │ │ │ ├── gremlin.py
│ │ │ │ ├── hugegraph.py
│ │ │ │ ├── kuzu.py
│ │ │ │ ├── nebulagraph.py
│ │ │ │ ├── neptune_cypher.py
│ │ │ │ ├── neptune_sparql.py
│ │ │ │ ├── ontotext_graphdb.py
│ │ │ │ ├── prompts.py
│ │ │ │ └── sparql.py
│ │ │ ├── history_aware_retriever.py
│ │ │ ├── hyde/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── llm.py
│ │ │ ├── llm_bash/
│ │ │ │ └── __init__.py
│ │ │ ├── llm_checker/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── llm_math/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── llm_requests.py
│ │ │ ├── llm_summarization_checker/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts/
│ │ │ │ ├── are_all_true_prompt.txt
│ │ │ │ ├── check_facts.txt
│ │ │ │ ├── create_facts.txt
│ │ │ │ └── revise_summary.txt
│ │ │ ├── llm_symbolic_math/
│ │ │ │ └── __init__.py
│ │ │ ├── loading.py
│ │ │ ├── mapreduce.py
│ │ │ ├── moderation.py
│ │ │ ├── natbot/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── crawler.py
│ │ │ │ └── prompt.py
│ │ │ ├── openai_functions/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── citation_fuzzy_match.py
│ │ │ │ ├── extraction.py
│ │ │ │ ├── openapi.py
│ │ │ │ ├── qa_with_structure.py
│ │ │ │ ├── tagging.py
│ │ │ │ └── utils.py
│ │ │ ├── openai_tools/
│ │ │ │ ├── __init__.py
│ │ │ │ └── extraction.py
│ │ │ ├── prompt_selector.py
│ │ │ ├── qa_generation/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── qa_with_sources/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── loading.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ ├── retrieval.py
│ │ │ │ ├── stuff_prompt.py
│ │ │ │ └── vector_db.py
│ │ │ ├── query_constructor/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── ir.py
│ │ │ │ ├── parser.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── schema.py
│ │ │ ├── question_answering/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chain.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── map_rerank_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ └── stuff_prompt.py
│ │ │ ├── retrieval.py
│ │ │ ├── retrieval_qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── router/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── embedding_router.py
│ │ │ │ ├── llm_router.py
│ │ │ │ ├── multi_prompt.py
│ │ │ │ ├── multi_prompt_prompt.py
│ │ │ │ ├── multi_retrieval_prompt.py
│ │ │ │ └── multi_retrieval_qa.py
│ │ │ ├── sequential.py
│ │ │ ├── sql_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── query.py
│ │ │ ├── structured_output/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── summarize/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chain.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ └── stuff_prompt.py
│ │ │ └── transform.py
│ │ ├── chat_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── facebook_messenger.py
│ │ │ ├── gmail.py
│ │ │ ├── imessage.py
│ │ │ ├── langsmith.py
│ │ │ ├── slack.py
│ │ │ ├── telegram.py
│ │ │ ├── utils.py
│ │ │ └── whatsapp.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── anthropic.py
│ │ │ ├── anyscale.py
│ │ │ ├── azure_openai.py
│ │ │ ├── azureml_endpoint.py
│ │ │ ├── baichuan.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── base.py
│ │ │ ├── bedrock.py
│ │ │ ├── cohere.py
│ │ │ ├── databricks.py
│ │ │ ├── ernie.py
│ │ │ ├── everlyai.py
│ │ │ ├── fake.py
│ │ │ ├── fireworks.py
│ │ │ ├── gigachat.py
│ │ │ ├── google_palm.py
│ │ │ ├── human.py
│ │ │ ├── hunyuan.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── jinachat.py
│ │ │ ├── konko.py
│ │ │ ├── litellm.py
│ │ │ ├── meta.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_ai_gateway.py
│ │ │ ├── ollama.py
│ │ │ ├── openai.py
│ │ │ ├── pai_eas_endpoint.py
│ │ │ ├── promptlayer_openai.py
│ │ │ ├── tongyi.py
│ │ │ ├── vertexai.py
│ │ │ ├── volcengine_maas.py
│ │ │ └── yandex.py
│ │ ├── docstore/
│ │ │ ├── __init__.py
│ │ │ ├── arbitrary_fn.py
│ │ │ ├── base.py
│ │ │ ├── document.py
│ │ │ ├── in_memory.py
│ │ │ └── wikipedia.py
│ │ ├── document_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── acreom.py
│ │ │ ├── airbyte.py
│ │ │ ├── airbyte_json.py
│ │ │ ├── airtable.py
│ │ │ ├── apify_dataset.py
│ │ │ ├── arcgis_loader.py
│ │ │ ├── arxiv.py
│ │ │ ├── assemblyai.py
│ │ │ ├── async_html.py
│ │ │ ├── azlyrics.py
│ │ │ ├── azure_ai_data.py
│ │ │ ├── azure_blob_storage_container.py
│ │ │ ├── azure_blob_storage_file.py
│ │ │ ├── baiducloud_bos_directory.py
│ │ │ ├── baiducloud_bos_file.py
│ │ │ ├── base.py
│ │ │ ├── base_o365.py
│ │ │ ├── bibtex.py
│ │ │ ├── bigquery.py
│ │ │ ├── bilibili.py
│ │ │ ├── blackboard.py
│ │ │ ├── blob_loaders/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── file_system.py
│ │ │ │ ├── schema.py
│ │ │ │ └── youtube_audio.py
│ │ │ ├── blockchain.py
│ │ │ ├── brave_search.py
│ │ │ ├── browserless.py
│ │ │ ├── chatgpt.py
│ │ │ ├── chromium.py
│ │ │ ├── college_confidential.py
│ │ │ ├── concurrent.py
│ │ │ ├── confluence.py
│ │ │ ├── conllu.py
│ │ │ ├── couchbase.py
│ │ │ ├── csv_loader.py
│ │ │ ├── cube_semantic.py
│ │ │ ├── datadog_logs.py
│ │ │ ├── dataframe.py
│ │ │ ├── diffbot.py
│ │ │ ├── directory.py
│ │ │ ├── discord.py
│ │ │ ├── docugami.py
│ │ │ ├── docusaurus.py
│ │ │ ├── dropbox.py
│ │ │ ├── duckdb_loader.py
│ │ │ ├── email.py
│ │ │ ├── epub.py
│ │ │ ├── etherscan.py
│ │ │ ├── evernote.py
│ │ │ ├── excel.py
│ │ │ ├── facebook_chat.py
│ │ │ ├── fauna.py
│ │ │ ├── figma.py
│ │ │ ├── gcs_directory.py
│ │ │ ├── gcs_file.py
│ │ │ ├── generic.py
│ │ │ ├── geodataframe.py
│ │ │ ├── git.py
│ │ │ ├── gitbook.py
│ │ │ ├── github.py
│ │ │ ├── google_speech_to_text.py
│ │ │ ├── googledrive.py
│ │ │ ├── gutenberg.py
│ │ │ ├── helpers.py
│ │ │ ├── hn.py
│ │ │ ├── html.py
│ │ │ ├── html_bs.py
│ │ │ ├── hugging_face_dataset.py
│ │ │ ├── ifixit.py
│ │ │ ├── image.py
│ │ │ ├── image_captions.py
│ │ │ ├── imsdb.py
│ │ │ ├── iugu.py
│ │ │ ├── joplin.py
│ │ │ ├── json_loader.py
│ │ │ ├── lakefs.py
│ │ │ ├── larksuite.py
│ │ │ ├── markdown.py
│ │ │ ├── mastodon.py
│ │ │ ├── max_compute.py
│ │ │ ├── mediawikidump.py
│ │ │ ├── merge.py
│ │ │ ├── mhtml.py
│ │ │ ├── modern_treasury.py
│ │ │ ├── mongodb.py
│ │ │ ├── news.py
│ │ │ ├── notebook.py
│ │ │ ├── notion.py
│ │ │ ├── notiondb.py
│ │ │ ├── nuclia.py
│ │ │ ├── obs_directory.py
│ │ │ ├── obs_file.py
│ │ │ ├── obsidian.py
│ │ │ ├── odt.py
│ │ │ ├── onedrive.py
│ │ │ ├── onedrive_file.py
│ │ │ ├── onenote.py
│ │ │ ├── open_city_data.py
│ │ │ ├── org_mode.py
│ │ │ ├── parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── audio.py
│ │ │ │ ├── docai.py
│ │ │ │ ├── generic.py
│ │ │ │ ├── grobid.py
│ │ │ │ ├── html/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── bs4.py
│ │ │ │ ├── language/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── cobol.py
│ │ │ │ │ ├── code_segmenter.py
│ │ │ │ │ ├── javascript.py
│ │ │ │ │ ├── language_parser.py
│ │ │ │ │ └── python.py
│ │ │ │ ├── msword.py
│ │ │ │ ├── pdf.py
│ │ │ │ ├── registry.py
│ │ │ │ └── txt.py
│ │ │ ├── pdf.py
│ │ │ ├── polars_dataframe.py
│ │ │ ├── powerpoint.py
│ │ │ ├── psychic.py
│ │ │ ├── pubmed.py
│ │ │ ├── pyspark_dataframe.py
│ │ │ ├── python.py
│ │ │ ├── quip.py
│ │ │ ├── readthedocs.py
│ │ │ ├── recursive_url_loader.py
│ │ │ ├── reddit.py
│ │ │ ├── roam.py
│ │ │ ├── rocksetdb.py
│ │ │ ├── rspace.py
│ │ │ ├── rss.py
│ │ │ ├── rst.py
│ │ │ ├── rtf.py
│ │ │ ├── s3_directory.py
│ │ │ ├── s3_file.py
│ │ │ ├── sharepoint.py
│ │ │ ├── sitemap.py
│ │ │ ├── slack_directory.py
│ │ │ ├── snowflake_loader.py
│ │ │ ├── spreedly.py
│ │ │ ├── srt.py
│ │ │ ├── stripe.py
│ │ │ ├── telegram.py
│ │ │ ├── tencent_cos_directory.py
│ │ │ ├── tencent_cos_file.py
│ │ │ ├── tensorflow_datasets.py
│ │ │ ├── text.py
│ │ │ ├── tomarkdown.py
│ │ │ ├── toml.py
│ │ │ ├── trello.py
│ │ │ ├── tsv.py
│ │ │ ├── twitter.py
│ │ │ ├── unstructured.py
│ │ │ ├── url.py
│ │ │ ├── url_playwright.py
│ │ │ ├── url_selenium.py
│ │ │ ├── weather.py
│ │ │ ├── web_base.py
│ │ │ ├── whatsapp_chat.py
│ │ │ ├── wikipedia.py
│ │ │ ├── word_document.py
│ │ │ ├── xml.py
│ │ │ ├── xorbits.py
│ │ │ └── youtube.py
│ │ ├── document_transformers/
│ │ │ ├── __init__.py
│ │ │ ├── beautiful_soup_transformer.py
│ │ │ ├── doctran_text_extract.py
│ │ │ ├── doctran_text_qa.py
│ │ │ ├── doctran_text_translate.py
│ │ │ ├── embeddings_redundant_filter.py
│ │ │ ├── google_translate.py
│ │ │ ├── html2text.py
│ │ │ ├── long_context_reorder.py
│ │ │ ├── nuclia_text_transform.py
│ │ │ ├── openai_functions.py
│ │ │ └── xsl/
│ │ │ └── html_chunks_with_headers.xslt
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── aleph_alpha.py
│ │ │ ├── awa.py
│ │ │ ├── azure_openai.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── base.py
│ │ │ ├── bedrock.py
│ │ │ ├── bookend.py
│ │ │ ├── cache.py
│ │ │ ├── clarifai.py
│ │ │ ├── cloudflare_workersai.py
│ │ │ ├── cohere.py
│ │ │ ├── dashscope.py
│ │ │ ├── databricks.py
│ │ │ ├── deepinfra.py
│ │ │ ├── edenai.py
│ │ │ ├── elasticsearch.py
│ │ │ ├── embaas.py
│ │ │ ├── ernie.py
│ │ │ ├── fake.py
│ │ │ ├── fastembed.py
│ │ │ ├── google_palm.py
│ │ │ ├── gpt4all.py
│ │ │ ├── gradient_ai.py
│ │ │ ├── huggingface.py
│ │ │ ├── huggingface_hub.py
│ │ │ ├── infinity.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── jina.py
│ │ │ ├── johnsnowlabs.py
│ │ │ ├── llamacpp.py
│ │ │ ├── llm_rails.py
│ │ │ ├── localai.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_gateway.py
│ │ │ ├── modelscope_hub.py
│ │ │ ├── mosaicml.py
│ │ │ ├── nlpcloud.py
│ │ │ ├── octoai_embeddings.py
│ │ │ ├── ollama.py
│ │ │ ├── openai.py
│ │ │ ├── sagemaker_endpoint.py
│ │ │ ├── self_hosted.py
│ │ │ ├── self_hosted_hugging_face.py
│ │ │ ├── sentence_transformer.py
│ │ │ ├── spacy_embeddings.py
│ │ │ ├── tensorflow_hub.py
│ │ │ ├── vertexai.py
│ │ │ ├── voyageai.py
│ │ │ └── xinference.py
│ │ ├── env.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── agents/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── trajectory_eval_chain.py
│ │ │ │ └── trajectory_eval_prompt.py
│ │ │ ├── comparison/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ ├── criteria/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ ├── embedding_distance/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── exact_match/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── loading.py
│ │ │ ├── parsing/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── json_distance.py
│ │ │ │ └── json_schema.py
│ │ │ ├── qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ ├── eval_prompt.py
│ │ │ │ ├── generate_chain.py
│ │ │ │ └── generate_prompt.py
│ │ │ ├── regex_match/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── schema.py
│ │ │ ├── scoring/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ └── string_distance/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── example_generator.py
│ │ ├── formatting.py
│ │ ├── globals.py
│ │ ├── graphs/
│ │ │ ├── __init__.py
│ │ │ ├── arangodb_graph.py
│ │ │ ├── falkordb_graph.py
│ │ │ ├── graph_document.py
│ │ │ ├── graph_store.py
│ │ │ ├── hugegraph.py
│ │ │ ├── kuzu_graph.py
│ │ │ ├── memgraph_graph.py
│ │ │ ├── nebula_graph.py
│ │ │ ├── neo4j_graph.py
│ │ │ ├── neptune_graph.py
│ │ │ ├── networkx_graph.py
│ │ │ └── rdf_graph.py
│ │ ├── hub.py
│ │ ├── indexes/
│ │ │ ├── __init__.py
│ │ │ ├── _api.py
│ │ │ ├── _sql_record_manager.py
│ │ │ ├── graph.py
│ │ │ ├── prompts/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── entity_extraction.py
│ │ │ │ ├── entity_summarization.py
│ │ │ │ └── knowledge_triplet_extraction.py
│ │ │ └── vectorstore.py
│ │ ├── input.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── ai21.py
│ │ │ ├── aleph_alpha.py
│ │ │ ├── amazon_api_gateway.py
│ │ │ ├── anthropic.py
│ │ │ ├── anyscale.py
│ │ │ ├── arcee.py
│ │ │ ├── aviary.py
│ │ │ ├── azureml_endpoint.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── bananadev.py
│ │ │ ├── base.py
│ │ │ ├── baseten.py
│ │ │ ├── beam.py
│ │ │ ├── bedrock.py
│ │ │ ├── bittensor.py
│ │ │ ├── cerebriumai.py
│ │ │ ├── chatglm.py
│ │ │ ├── clarifai.py
│ │ │ ├── cloudflare_workersai.py
│ │ │ ├── cohere.py
│ │ │ ├── ctransformers.py
│ │ │ ├── ctranslate2.py
│ │ │ ├── databricks.py
│ │ │ ├── deepinfra.py
│ │ │ ├── deepsparse.py
│ │ │ ├── edenai.py
│ │ │ ├── fake.py
│ │ │ ├── fireworks.py
│ │ │ ├── forefrontai.py
│ │ │ ├── gigachat.py
│ │ │ ├── google_palm.py
│ │ │ ├── gooseai.py
│ │ │ ├── gpt4all.py
│ │ │ ├── gradient_ai.py
│ │ │ ├── grammars/
│ │ │ │ ├── json.gbnf
│ │ │ │ └── list.gbnf
│ │ │ ├── huggingface_endpoint.py
│ │ │ ├── huggingface_hub.py
│ │ │ ├── huggingface_pipeline.py
│ │ │ ├── huggingface_text_gen_inference.py
│ │ │ ├── human.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── koboldai.py
│ │ │ ├── llamacpp.py
│ │ │ ├── loading.py
│ │ │ ├── manifest.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_ai_gateway.py
│ │ │ ├── modal.py
│ │ │ ├── mosaicml.py
│ │ │ ├── nlpcloud.py
│ │ │ ├── octoai_endpoint.py
│ │ │ ├── ollama.py
│ │ │ ├── opaqueprompts.py
│ │ │ ├── openai.py
│ │ │ ├── openllm.py
│ │ │ ├── openlm.py
│ │ │ ├── pai_eas_endpoint.py
│ │ │ ├── petals.py
│ │ │ ├── pipelineai.py
│ │ │ ├── predibase.py
│ │ │ ├── predictionguard.py
│ │ │ ├── promptlayer_openai.py
│ │ │ ├── replicate.py
│ │ │ ├── rwkv.py
│ │ │ ├── sagemaker_endpoint.py
│ │ │ ├── self_hosted.py
│ │ │ ├── self_hosted_hugging_face.py
│ │ │ ├── stochasticai.py
│ │ │ ├── symblai_nebula.py
│ │ │ ├── textgen.py
│ │ │ ├── titan_takeoff.py
│ │ │ ├── titan_takeoff_pro.py
│ │ │ ├── together.py
│ │ │ ├── tongyi.py
│ │ │ ├── utils.py
│ │ │ ├── vertexai.py
│ │ │ ├── vllm.py
│ │ │ ├── volcengine_maas.py
│ │ │ ├── watsonxllm.py
│ │ │ ├── writer.py
│ │ │ ├── xinference.py
│ │ │ └── yandex.py
│ │ ├── load/
│ │ │ ├── __init__.py
│ │ │ ├── dump.py
│ │ │ ├── load.py
│ │ │ └── serializable.py
│ │ ├── memory/
│ │ │ ├── __init__.py
│ │ │ ├── buffer.py
│ │ │ ├── buffer_window.py
│ │ │ ├── chat_memory.py
│ │ │ ├── chat_message_histories/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── astradb.py
│ │ │ │ ├── cassandra.py
│ │ │ │ ├── cosmos_db.py
│ │ │ │ ├── dynamodb.py
│ │ │ │ ├── elasticsearch.py
│ │ │ │ ├── file.py
│ │ │ │ ├── firestore.py
│ │ │ │ ├── in_memory.py
│ │ │ │ ├── momento.py
│ │ │ │ ├── mongodb.py
│ │ │ │ ├── neo4j.py
│ │ │ │ ├── postgres.py
│ │ │ │ ├── redis.py
│ │ │ │ ├── rocksetdb.py
│ │ │ │ ├── singlestoredb.py
│ │ │ │ ├── sql.py
│ │ │ │ ├── streamlit.py
│ │ │ │ ├── upstash_redis.py
│ │ │ │ ├── xata.py
│ │ │ │ └── zep.py
│ │ │ ├── combined.py
│ │ │ ├── entity.py
│ │ │ ├── kg.py
│ │ │ ├── motorhead_memory.py
│ │ │ ├── prompt.py
│ │ │ ├── readonly.py
│ │ │ ├── simple.py
│ │ │ ├── summary.py
│ │ │ ├── summary_buffer.py
│ │ │ ├── token_buffer.py
│ │ │ ├── utils.py
│ │ │ ├── vectorstore.py
│ │ │ ├── vectorstore_token_buffer_memory.py
│ │ │ └── zep_memory.py
│ │ ├── model_laboratory.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── boolean.py
│ │ │ ├── combining.py
│ │ │ ├── datetime.py
│ │ │ ├── enum.py
│ │ │ ├── ernie_functions.py
│ │ │ ├── fix.py
│ │ │ ├── format_instructions.py
│ │ │ ├── json.py
│ │ │ ├── list.py
│ │ │ ├── loading.py
│ │ │ ├── openai_functions.py
│ │ │ ├── openai_tools.py
│ │ │ ├── pandas_dataframe.py
│ │ │ ├── prompts.py
│ │ │ ├── pydantic.py
│ │ │ ├── rail_parser.py
│ │ │ ├── regex.py
│ │ │ ├── regex_dict.py
│ │ │ ├── retry.py
│ │ │ ├── structured.py
│ │ │ ├── xml.py
│ │ │ └── yaml.py
│ │ ├── prompts/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── example_selector/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── length_based.py
│ │ │ │ ├── ngram_overlap.py
│ │ │ │ └── semantic_similarity.py
│ │ │ ├── few_shot.py
│ │ │ ├── few_shot_with_templates.py
│ │ │ ├── loading.py
│ │ │ └── prompt.py
│ │ ├── py.typed
│ │ ├── python.py
│ │ ├── requests.py
│ │ ├── retrievers/
│ │ │ ├── __init__.py
│ │ │ ├── arcee.py
│ │ │ ├── arxiv.py
│ │ │ ├── azure_ai_search.py
│ │ │ ├── bedrock.py
│ │ │ ├── bm25.py
│ │ │ ├── chaindesk.py
│ │ │ ├── chatgpt_plugin_retriever.py
│ │ │ ├── cohere_rag_retriever.py
│ │ │ ├── contextual_compression.py
│ │ │ ├── databerry.py
│ │ │ ├── docarray.py
│ │ │ ├── document_compressors/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── chain_extract.py
│ │ │ │ ├── chain_extract_prompt.py
│ │ │ │ ├── chain_filter.py
│ │ │ │ ├── chain_filter_prompt.py
│ │ │ │ ├── cohere_rerank.py
│ │ │ │ ├── cross_encoder.py
│ │ │ │ ├── cross_encoder_rerank.py
│ │ │ │ ├── embeddings_filter.py
│ │ │ │ ├── flashrank_rerank.py
│ │ │ │ └── listwise_rerank.py
│ │ │ ├── elastic_search_bm25.py
│ │ │ ├── embedchain.py
│ │ │ ├── ensemble.py
│ │ │ ├── google_cloud_documentai_warehouse.py
│ │ │ ├── google_vertex_ai_search.py
│ │ │ ├── kay.py
│ │ │ ├── kendra.py
│ │ │ ├── knn.py
│ │ │ ├── llama_index.py
│ │ │ ├── merger_retriever.py
│ │ │ ├── metal.py
│ │ │ ├── milvus.py
│ │ │ ├── multi_query.py
│ │ │ ├── multi_vector.py
│ │ │ ├── outline.py
│ │ │ ├── parent_document_retriever.py
│ │ │ ├── pinecone_hybrid_search.py
│ │ │ ├── pubmed.py
│ │ │ ├── pupmed.py
│ │ │ ├── re_phraser.py
│ │ │ ├── remote_retriever.py
│ │ │ ├── self_query/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── astradb.py
│ │ │ │ ├── base.py
│ │ │ │ ├── chroma.py
│ │ │ │ ├── dashvector.py
│ │ │ │ ├── databricks_vector_search.py
│ │ │ │ ├── deeplake.py
│ │ │ │ ├── dingo.py
│ │ │ │ ├── elasticsearch.py
│ │ │ │ ├── milvus.py
│ │ │ │ ├── mongodb_atlas.py
│ │ │ │ ├── myscale.py
│ │ │ │ ├── opensearch.py
│ │ │ │ ├── pgvector.py
│ │ │ │ ├── pinecone.py
│ │ │ │ ├── qdrant.py
│ │ │ │ ├── redis.py
│ │ │ │ ├── supabase.py
│ │ │ │ ├── tencentvectordb.py
│ │ │ │ ├── timescalevector.py
│ │ │ │ ├── vectara.py
│ │ │ │ └── weaviate.py
│ │ │ ├── svm.py
│ │ │ ├── tavily_search_api.py
│ │ │ ├── tfidf.py
│ │ │ ├── time_weighted_retriever.py
│ │ │ ├── vespa_retriever.py
│ │ │ ├── weaviate_hybrid_search.py
│ │ │ ├── web_research.py
│ │ │ ├── wikipedia.py
│ │ │ ├── you.py
│ │ │ ├── zep.py
│ │ │ └── zilliz.py
│ │ ├── runnables/
│ │ │ ├── __init__.py
│ │ │ ├── hub.py
│ │ │ └── openai_functions.py
│ │ ├── schema/
│ │ │ ├── __init__.py
│ │ │ ├── agent.py
│ │ │ ├── cache.py
│ │ │ ├── callbacks/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── manager.py
│ │ │ │ ├── stdout.py
│ │ │ │ ├── streaming_stdout.py
│ │ │ │ └── tracers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── evaluation.py
│ │ │ │ ├── langchain.py
│ │ │ │ ├── log_stream.py
│ │ │ │ ├── root_listeners.py
│ │ │ │ ├── run_collector.py
│ │ │ │ ├── schemas.py
│ │ │ │ └── stdout.py
│ │ │ ├── chat.py
│ │ │ ├── chat_history.py
│ │ │ ├── document.py
│ │ │ ├── embeddings.py
│ │ │ ├── exceptions.py
│ │ │ ├── language_model.py
│ │ │ ├── memory.py
│ │ │ ├── messages.py
│ │ │ ├── output.py
│ │ │ ├── output_parser.py
│ │ │ ├── prompt.py
│ │ │ ├── prompt_template.py
│ │ │ ├── retriever.py
│ │ │ ├── runnable/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── branch.py
│ │ │ │ ├── config.py
│ │ │ │ ├── configurable.py
│ │ │ │ ├── fallbacks.py
│ │ │ │ ├── history.py
│ │ │ │ ├── passthrough.py
│ │ │ │ ├── retry.py
│ │ │ │ ├── router.py
│ │ │ │ └── utils.py
│ │ │ ├── storage.py
│ │ │ └── vectorstore.py
│ │ ├── serpapi.py
│ │ ├── smith/
│ │ │ ├── __init__.py
│ │ │ └── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── config.py
│ │ │ ├── name_generation.py
│ │ │ ├── progress.py
│ │ │ ├── runner_utils.py
│ │ │ └── string_run_evaluator.py
│ │ ├── sql_database.py
│ │ ├── storage/
│ │ │ ├── __init__.py
│ │ │ ├── _lc_store.py
│ │ │ ├── encoder_backed.py
│ │ │ ├── exceptions.py
│ │ │ ├── file_system.py
│ │ │ ├── in_memory.py
│ │ │ ├── redis.py
│ │ │ └── upstash_redis.py
│ │ ├── text_splitter.py
│ │ ├── tools/
│ │ │ ├── __init__.py
│ │ │ ├── ainetwork/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── app.py
│ │ │ │ ├── base.py
│ │ │ │ ├── owner.py
│ │ │ │ ├── rule.py
│ │ │ │ ├── transfer.py
│ │ │ │ └── value.py
│ │ │ ├── amadeus/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── closest_airport.py
│ │ │ │ └── flight_search.py
│ │ │ ├── arxiv/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── azure_cognitive_services/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── form_recognizer.py
│ │ │ │ ├── image_analysis.py
│ │ │ │ ├── speech2text.py
│ │ │ │ ├── text2speech.py
│ │ │ │ └── text_analytics_health.py
│ │ │ ├── base.py
│ │ │ ├── bearly/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── bing_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── brave_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── clickup/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── convert_to_openai.py
│ │ │ ├── dataforseo_api_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── ddg_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── e2b_data_analysis/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── edenai/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── audio_speech_to_text.py
│ │ │ │ ├── audio_text_to_speech.py
│ │ │ │ ├── edenai_base_tool.py
│ │ │ │ ├── image_explicitcontent.py
│ │ │ │ ├── image_objectdetection.py
│ │ │ │ ├── ocr_identityparser.py
│ │ │ │ ├── ocr_invoiceparser.py
│ │ │ │ └── text_moderation.py
│ │ │ ├── eleven_labs/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── models.py
│ │ │ │ └── text2speech.py
│ │ │ ├── file_management/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── copy.py
│ │ │ │ ├── delete.py
│ │ │ │ ├── file_search.py
│ │ │ │ ├── list_dir.py
│ │ │ │ ├── move.py
│ │ │ │ ├── read.py
│ │ │ │ └── write.py
│ │ │ ├── github/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── gitlab/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── gmail/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── create_draft.py
│ │ │ │ ├── get_message.py
│ │ │ │ ├── get_thread.py
│ │ │ │ ├── search.py
│ │ │ │ └── send_message.py
│ │ │ ├── golden_query/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_cloud/
│ │ │ │ ├── __init__.py
│ │ │ │ └── texttospeech.py
│ │ │ ├── google_finance/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_jobs/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_lens/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_places/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_scholar/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_serper/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_trends/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── graphql/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── human/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── ifttt.py
│ │ │ ├── interaction/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── jira/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── json/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── memorize/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── merriam_webster/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── metaphor_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── multion/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── close_session.py
│ │ │ │ ├── create_session.py
│ │ │ │ └── update_session.py
│ │ │ ├── nasa/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── nuclia/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── office365/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── create_draft_message.py
│ │ │ │ ├── events_search.py
│ │ │ │ ├── messages_search.py
│ │ │ │ ├── send_event.py
│ │ │ │ └── send_message.py
│ │ │ ├── openapi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── utils/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── api_models.py
│ │ │ │ └── openapi_utils.py
│ │ │ ├── openweathermap/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── playwright/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── click.py
│ │ │ │ ├── current_page.py
│ │ │ │ ├── extract_hyperlinks.py
│ │ │ │ ├── extract_text.py
│ │ │ │ ├── get_elements.py
│ │ │ │ ├── navigate.py
│ │ │ │ └── navigate_back.py
│ │ │ ├── plugin.py
│ │ │ ├── powerbi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── pubmed/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── python/
│ │ │ │ └── __init__.py
│ │ │ ├── reddit_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── render.py
│ │ │ ├── requests/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── retriever.py
│ │ │ ├── scenexplain/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── searchapi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── searx_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── shell/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── slack/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── get_channel.py
│ │ │ │ ├── get_message.py
│ │ │ │ ├── schedule_message.py
│ │ │ │ └── send_message.py
│ │ │ ├── sleep/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── spark_sql/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── sql_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── tool.py
│ │ │ ├── stackexchange/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── steam/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── steamship_image_generation/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── tavily_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── vectorstore/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── wikipedia/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── wolfram_alpha/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── yahoo_finance_news.py
│ │ │ ├── youtube/
│ │ │ │ ├── __init__.py
│ │ │ │ └── search.py
│ │ │ └── zapier/
│ │ │ ├── __init__.py
│ │ │ └── tool.py
│ │ ├── utilities/
│ │ │ ├── __init__.py
│ │ │ ├── alpha_vantage.py
│ │ │ ├── anthropic.py
│ │ │ ├── apify.py
│ │ │ ├── arcee.py
│ │ │ ├── arxiv.py
│ │ │ ├── asyncio.py
│ │ │ ├── awslambda.py
│ │ │ ├── bibtex.py
│ │ │ ├── bing_search.py
│ │ │ ├── brave_search.py
│ │ │ ├── clickup.py
│ │ │ ├── dalle_image_generator.py
│ │ │ ├── dataforseo_api_search.py
│ │ │ ├── duckduckgo_search.py
│ │ │ ├── github.py
│ │ │ ├── gitlab.py
│ │ │ ├── golden_query.py
│ │ │ ├── google_finance.py
│ │ │ ├── google_jobs.py
│ │ │ ├── google_lens.py
│ │ │ ├── google_places_api.py
│ │ │ ├── google_scholar.py
│ │ │ ├── google_search.py
│ │ │ ├── google_serper.py
│ │ │ ├── google_trends.py
│ │ │ ├── graphql.py
│ │ │ ├── jira.py
│ │ │ ├── max_compute.py
│ │ │ ├── merriam_webster.py
│ │ │ ├── metaphor_search.py
│ │ │ ├── nasa.py
│ │ │ ├── opaqueprompts.py
│ │ │ ├── openapi.py
│ │ │ ├── openweathermap.py
│ │ │ ├── outline.py
│ │ │ ├── portkey.py
│ │ │ ├── powerbi.py
│ │ │ ├── pubmed.py
│ │ │ ├── python.py
│ │ │ ├── reddit_search.py
│ │ │ ├── redis.py
│ │ │ ├── requests.py
│ │ │ ├── scenexplain.py
│ │ │ ├── searchapi.py
│ │ │ ├── searx_search.py
│ │ │ ├── serpapi.py
│ │ │ ├── spark_sql.py
│ │ │ ├── sql_database.py
│ │ │ ├── stackexchange.py
│ │ │ ├── steam.py
│ │ │ ├── tavily_search.py
│ │ │ ├── tensorflow_datasets.py
│ │ │ ├── twilio.py
│ │ │ ├── vertexai.py
│ │ │ ├── wikipedia.py
│ │ │ ├── wolfram_alpha.py
│ │ │ └── zapier.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── aiter.py
│ │ │ ├── env.py
│ │ │ ├── ernie_functions.py
│ │ │ ├── formatting.py
│ │ │ ├── html.py
│ │ │ ├── input.py
│ │ │ ├── iter.py
│ │ │ ├── json_schema.py
│ │ │ ├── math.py
│ │ │ ├── openai.py
│ │ │ ├── openai_functions.py
│ │ │ ├── pydantic.py
│ │ │ ├── strings.py
│ │ │ └── utils.py
│ │ └── vectorstores/
│ │ ├── __init__.py
│ │ ├── alibabacloud_opensearch.py
│ │ ├── analyticdb.py
│ │ ├── annoy.py
│ │ ├── astradb.py
│ │ ├── atlas.py
│ │ ├── awadb.py
│ │ ├── azure_cosmos_db.py
│ │ ├── azuresearch.py
│ │ ├── bageldb.py
│ │ ├── baiducloud_vector_search.py
│ │ ├── base.py
│ │ ├── cassandra.py
│ │ ├── chroma.py
│ │ ├── clarifai.py
│ │ ├── clickhouse.py
│ │ ├── dashvector.py
│ │ ├── databricks_vector_search.py
│ │ ├── deeplake.py
│ │ ├── dingo.py
│ │ ├── docarray/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── hnsw.py
│ │ │ └── in_memory.py
│ │ ├── elastic_vector_search.py
│ │ ├── elasticsearch.py
│ │ ├── epsilla.py
│ │ ├── faiss.py
│ │ ├── hippo.py
│ │ ├── hologres.py
│ │ ├── lancedb.py
│ │ ├── llm_rails.py
│ │ ├── marqo.py
│ │ ├── matching_engine.py
│ │ ├── meilisearch.py
│ │ ├── milvus.py
│ │ ├── momento_vector_index.py
│ │ ├── mongodb_atlas.py
│ │ ├── myscale.py
│ │ ├── neo4j_vector.py
│ │ ├── nucliadb.py
│ │ ├── opensearch_vector_search.py
│ │ ├── pgembedding.py
│ │ ├── pgvecto_rs.py
│ │ ├── pgvector.py
│ │ ├── pinecone.py
│ │ ├── qdrant.py
│ │ ├── redis/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── filters.py
│ │ │ └── schema.py
│ │ ├── rocksetdb.py
│ │ ├── scann.py
│ │ ├── semadb.py
│ │ ├── singlestoredb.py
│ │ ├── sklearn.py
│ │ ├── sqlitevss.py
│ │ ├── starrocks.py
│ │ ├── supabase.py
│ │ ├── tair.py
│ │ ├── tencentvectordb.py
│ │ ├── tiledb.py
│ │ ├── timescalevector.py
│ │ ├── typesense.py
│ │ ├── usearch.py
│ │ ├── utils.py
│ │ ├── vald.py
│ │ ├── vearch.py
│ │ ├── vectara.py
│ │ ├── vespa.py
│ │ ├── weaviate.py
│ │ ├── xata.py
│ │ ├── yellowbrick.py
│ │ ├── zep.py
│ │ └── zilliz.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── data.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── cache/
│ │ │ ├── __init__.py
│ │ │ └── fake_embeddings.py
│ │ ├── chains/
│ │ │ ├── __init__.py
│ │ │ └── openai_functions/
│ │ │ ├── __init__.py
│ │ │ └── test_openapi.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── conftest.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ └── embedding_distance/
│ │ │ ├── __init__.py
│ │ │ └── test_embedding.py
│ │ ├── examples/
│ │ │ ├── README.org
│ │ │ ├── README.rst
│ │ │ ├── brandfetch-brandfetch-2.0.0-resolved.json
│ │ │ ├── default-encoding.py
│ │ │ ├── example-utf8.html
│ │ │ ├── example.html
│ │ │ ├── example.json
│ │ │ ├── example.mht
│ │ │ ├── facebook_chat.json
│ │ │ ├── factbook.xml
│ │ │ ├── fake-email-attachment.eml
│ │ │ ├── fake.odt
│ │ │ ├── hello.msg
│ │ │ ├── hello_world.js
│ │ │ ├── hello_world.py
│ │ │ ├── non-utf8-encoding.py
│ │ │ ├── sample_rss_feeds.opml
│ │ │ ├── sitemap.xml
│ │ │ ├── stanley-cups.csv
│ │ │ ├── stanley-cups.tsv
│ │ │ ├── stanley-cups.xlsx
│ │ │ └── whatsapp_chat.txt
│ │ ├── memory/
│ │ │ ├── __init__.py
│ │ │ └── docker-compose/
│ │ │ └── elasticsearch.yml
│ │ ├── prompts/
│ │ │ └── __init__.py
│ │ ├── retrievers/
│ │ │ └── document_compressors/
│ │ │ ├── __init__.py
│ │ │ ├── test_cohere_reranker.py
│ │ │ └── test_listwise_rerank.py
│ │ ├── test_compile.py
│ │ ├── test_hub.py
│ │ └── test_schema.py
│ ├── mock_servers/
│ │ ├── __init__.py
│ │ └── robot/
│ │ ├── __init__.py
│ │ └── server.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── _api/
│ │ ├── __init__.py
│ │ └── test_importing.py
│ ├── agents/
│ │ ├── __init__.py
│ │ ├── agent_toolkits/
│ │ │ ├── __init__.py
│ │ │ └── test_imports.py
│ │ ├── format_scratchpad/
│ │ │ ├── __init__.py
│ │ │ ├── test_log.py
│ │ │ ├── test_log_to_messages.py
│ │ │ ├── test_openai_functions.py
│ │ │ ├── test_openai_tools.py
│ │ │ └── test_xml.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── test_convo_output_parser.py
│ │ │ ├── test_json.py
│ │ │ ├── test_openai_functions.py
│ │ │ ├── test_react_json_single_input.py
│ │ │ ├── test_react_single_input.py
│ │ │ ├── test_self_ask.py
│ │ │ └── test_xml.py
│ │ ├── test_agent.py
│ │ ├── test_agent_async.py
│ │ ├── test_agent_iterator.py
│ │ ├── test_chat.py
│ │ ├── test_imports.py
│ │ ├── test_initialize.py
│ │ ├── test_mrkl.py
│ │ ├── test_mrkl_output_parser.py
│ │ ├── test_openai_assistant.py
│ │ ├── test_openai_functions_multi.py
│ │ ├── test_public_api.py
│ │ ├── test_structured_chat.py
│ │ └── test_types.py
│ ├── callbacks/
│ │ ├── __init__.py
│ │ ├── fake_callback_handler.py
│ │ ├── test_base.py
│ │ ├── test_file.py
│ │ ├── test_imports.py
│ │ ├── test_manager.py
│ │ ├── test_stdout.py
│ │ └── tracers/
│ │ ├── __init__.py
│ │ └── test_logging.py
│ ├── chains/
│ │ ├── __init__.py
│ │ ├── query_constructor/
│ │ │ ├── __init__.py
│ │ │ └── test_parser.py
│ │ ├── question_answering/
│ │ │ ├── __init__.py
│ │ │ └── test_map_rerank_prompt.py
│ │ ├── test_base.py
│ │ ├── test_combine_documents.py
│ │ ├── test_constitutional_ai.py
│ │ ├── test_conversation.py
│ │ ├── test_conversation_retrieval.py
│ │ ├── test_flare.py
│ │ ├── test_history_aware_retriever.py
│ │ ├── test_hyde.py
│ │ ├── test_imports.py
│ │ ├── test_llm_checker.py
│ │ ├── test_llm_math.py
│ │ ├── test_llm_summarization_checker.py
│ │ ├── test_memory.py
│ │ ├── test_qa_with_sources.py
│ │ ├── test_retrieval.py
│ │ ├── test_sequential.py
│ │ ├── test_summary_buffer_memory.py
│ │ └── test_transform.py
│ ├── chat_models/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── conftest.py
│ ├── data/
│ │ ├── prompt_file.txt
│ │ └── prompts/
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ └── simple_prompt.json
│ ├── docstore/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── document_loaders/
│ │ ├── __init__.py
│ │ ├── blob_loaders/
│ │ │ ├── __init__.py
│ │ │ └── test_public_api.py
│ │ ├── parsers/
│ │ │ ├── __init__.py
│ │ │ └── test_public_api.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── document_transformers/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_caching.py
│ │ └── test_imports.py
│ ├── evaluation/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── comparison/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── criteria/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── exact_match/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── parsing/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_json_distance.py
│ │ │ └── test_json_schema.py
│ │ ├── qa/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── regex_match/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── run_evaluators/
│ │ │ └── __init__.py
│ │ ├── scoring/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── string_distance/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ └── test_imports.py
│ ├── examples/
│ │ ├── example-non-utf8.csv
│ │ ├── example-non-utf8.txt
│ │ ├── example-utf8.csv
│ │ ├── example-utf8.txt
│ │ └── test_specs/
│ │ ├── apis-guru/
│ │ │ └── apispec.json
│ │ ├── biztoc/
│ │ │ └── apispec.json
│ │ ├── calculator/
│ │ │ └── apispec.json
│ │ ├── datasette/
│ │ │ └── apispec.json
│ │ ├── freetv-app/
│ │ │ └── apispec.json
│ │ ├── joinmilo/
│ │ │ └── apispec.json
│ │ ├── klarna/
│ │ │ └── apispec.json
│ │ ├── milo/
│ │ │ └── apispec.json
│ │ ├── quickchart/
│ │ │ └── apispec.json
│ │ ├── robot/
│ │ │ └── apispec.yaml
│ │ ├── robot_openapi.yaml
│ │ ├── schooldigger/
│ │ │ └── apispec.json
│ │ ├── shop/
│ │ │ └── apispec.json
│ │ ├── slack/
│ │ │ └── apispec.json
│ │ ├── speak/
│ │ │ └── apispec.json
│ │ ├── urlbox/
│ │ │ └── apispec.json
│ │ ├── wellknown/
│ │ │ └── apispec.json
│ │ ├── wolframalpha/
│ │ │ └── apispec.json
│ │ ├── wolframcloud/
│ │ │ └── apispec.json
│ │ └── zapier/
│ │ └── apispec.json
│ ├── graphs/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── indexes/
│ │ ├── __init__.py
│ │ ├── test_api.py
│ │ ├── test_imports.py
│ │ └── test_indexing.py
│ ├── llms/
│ │ ├── __init__.py
│ │ ├── fake_chat_model.py
│ │ ├── fake_llm.py
│ │ ├── test_base.py
│ │ ├── test_fake_chat_model.py
│ │ └── test_imports.py
│ ├── load/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_dump.ambr
│ │ ├── test_dump.py
│ │ ├── test_imports.py
│ │ └── test_load.py
│ ├── memory/
│ │ ├── __init__.py
│ │ ├── chat_message_histories/
│ │ │ ├── __init__.py
│ │ │ └── test_imports.py
│ │ ├── test_combined_memory.py
│ │ └── test_imports.py
│ ├── output_parsers/
│ │ ├── __init__.py
│ │ ├── test_boolean_parser.py
│ │ ├── test_combining_parser.py
│ │ ├── test_datetime_parser.py
│ │ ├── test_enum_parser.py
│ │ ├── test_fix.py
│ │ ├── test_imports.py
│ │ ├── test_json.py
│ │ ├── test_pandas_dataframe_parser.py
│ │ ├── test_regex.py
│ │ ├── test_regex_dict.py
│ │ ├── test_retry.py
│ │ ├── test_structured_parser.py
│ │ └── test_yaml_parser.py
│ ├── prompts/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_chat.py
│ │ ├── test_few_shot.py
│ │ ├── test_few_shot_with_templates.py
│ │ ├── test_imports.py
│ │ ├── test_loading.py
│ │ └── test_prompt.py
│ ├── retrievers/
│ │ ├── __init__.py
│ │ ├── document_compressors/
│ │ │ ├── __init__.py
│ │ │ ├── test_chain_extract.py
│ │ │ ├── test_chain_filter.py
│ │ │ └── test_listwise_rerank.py
│ │ ├── parrot_retriever.py
│ │ ├── self_query/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── sequential_retriever.py
│ │ ├── test_ensemble.py
│ │ ├── test_imports.py
│ │ ├── test_multi_query.py
│ │ ├── test_multi_vector.py
│ │ ├── test_parent_document.py
│ │ └── test_time_weighted_retriever.py
│ ├── runnables/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_openai_functions.ambr
│ │ ├── test_hub.py
│ │ └── test_openai_functions.py
│ ├── schema/
│ │ ├── __init__.py
│ │ ├── runnable/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_branch.py
│ │ │ ├── test_config.py
│ │ │ ├── test_configurable.py
│ │ │ ├── test_fallbacks.py
│ │ │ ├── test_history.py
│ │ │ ├── test_imports.py
│ │ │ ├── test_passthrough.py
│ │ │ ├── test_retry.py
│ │ │ ├── test_router.py
│ │ │ └── test_utils.py
│ │ ├── test_agent.py
│ │ ├── test_cache.py
│ │ ├── test_chat.py
│ │ ├── test_chat_history.py
│ │ ├── test_document.py
│ │ ├── test_embeddings.py
│ │ ├── test_exceptions.py
│ │ ├── test_imports.py
│ │ ├── test_language_model.py
│ │ ├── test_memory.py
│ │ ├── test_messages.py
│ │ ├── test_output.py
│ │ ├── test_output_parser.py
│ │ ├── test_prompt.py
│ │ ├── test_prompt_template.py
│ │ ├── test_retriever.py
│ │ ├── test_storage.py
│ │ └── test_vectorstore.py
│ ├── smith/
│ │ ├── __init__.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── test_runner_utils.py
│ │ │ └── test_string_run_evaluator.py
│ │ └── test_imports.py
│ ├── storage/
│ │ ├── __init__.py
│ │ ├── test_filesystem.py
│ │ ├── test_imports.py
│ │ └── test_lc_store.py
│ ├── stubs.py
│ ├── test_dependencies.py
│ ├── test_formatting.py
│ ├── test_globals.py
│ ├── test_imports.py
│ ├── test_pytest_config.py
│ ├── test_schema.py
│ ├── test_utils.py
│ ├── tools/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_imports.py
│ │ └── test_render.py
│ ├── utilities/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── utils/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_iter.py
│ │ └── test_openai_functions.py
│ └── vectorstores/
│ ├── __init__.py
│ └── test_public_api.py
├── langchain_v1/
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ ├── factory.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _execution.py
│ │ │ │ ├── _redaction.py
│ │ │ │ ├── _retry.py
│ │ │ │ ├── context_editing.py
│ │ │ │ ├── file_search.py
│ │ │ │ ├── human_in_the_loop.py
│ │ │ │ ├── model_call_limit.py
│ │ │ │ ├── model_fallback.py
│ │ │ │ ├── model_retry.py
│ │ │ │ ├── pii.py
│ │ │ │ ├── shell_tool.py
│ │ │ │ ├── summarization.py
│ │ │ │ ├── todo.py
│ │ │ │ ├── tool_call_limit.py
│ │ │ │ ├── tool_emulator.py
│ │ │ │ ├── tool_retry.py
│ │ │ │ ├── tool_selection.py
│ │ │ │ └── types.py
│ │ │ └── structured_output.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── messages/
│ │ │ └── __init__.py
│ │ ├── py.typed
│ │ ├── rate_limiters/
│ │ │ └── __init__.py
│ │ └── tools/
│ │ ├── __init__.py
│ │ └── tool_node.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── check_version.py
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ └── middleware/
│ │ │ ├── __init__.py
│ │ │ └── test_shell_tool_integration.py
│ │ ├── cache/
│ │ │ ├── __init__.py
│ │ │ └── fake_embeddings.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── conftest.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── agents/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_middleware_agent.ambr
│ │ │ ├── test_middleware_decorators.ambr
│ │ │ ├── test_middleware_framework.ambr
│ │ │ └── test_return_direct_graph.ambr
│ │ ├── any_str.py
│ │ ├── compose-postgres.yml
│ │ ├── compose-redis.yml
│ │ ├── conftest.py
│ │ ├── conftest_checkpointer.py
│ │ ├── conftest_store.py
│ │ ├── memory_assert.py
│ │ ├── messages.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ ├── __snapshots__/
│ │ │ │ ├── test_middleware_decorators.ambr
│ │ │ │ ├── test_middleware_diagram.ambr
│ │ │ │ └── test_middleware_framework.ambr
│ │ │ ├── core/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── __snapshots__/
│ │ │ │ │ ├── test_decorators.ambr
│ │ │ │ │ ├── test_diagram.ambr
│ │ │ │ │ └── test_framework.ambr
│ │ │ │ ├── test_composition.py
│ │ │ │ ├── test_decorators.py
│ │ │ │ ├── test_diagram.py
│ │ │ │ ├── test_dynamic_tools.py
│ │ │ │ ├── test_framework.py
│ │ │ │ ├── test_overrides.py
│ │ │ │ ├── test_sync_async_wrappers.py
│ │ │ │ ├── test_tools.py
│ │ │ │ ├── test_wrap_model_call.py
│ │ │ │ ├── test_wrap_model_call_state_update.py
│ │ │ │ └── test_wrap_tool_call.py
│ │ │ └── implementations/
│ │ │ ├── __init__.py
│ │ │ ├── test_context_editing.py
│ │ │ ├── test_file_search.py
│ │ │ ├── test_human_in_the_loop.py
│ │ │ ├── test_model_call_limit.py
│ │ │ ├── test_model_fallback.py
│ │ │ ├── test_model_retry.py
│ │ │ ├── test_pii.py
│ │ │ ├── test_shell_execution_policies.py
│ │ │ ├── test_shell_tool.py
│ │ │ ├── test_structured_output_retry.py
│ │ │ ├── test_summarization.py
│ │ │ ├── test_todo.py
│ │ │ ├── test_tool_call_limit.py
│ │ │ ├── test_tool_emulator.py
│ │ │ ├── test_tool_retry.py
│ │ │ └── test_tool_selection.py
│ │ ├── middleware_typing/
│ │ │ ├── __init__.py
│ │ │ ├── test_middleware_backwards_compat.py
│ │ │ ├── test_middleware_type_errors.py
│ │ │ └── test_middleware_typing.py
│ │ ├── model.py
│ │ ├── specifications/
│ │ │ ├── responses.json
│ │ │ └── return_direct.json
│ │ ├── test_agent_name.py
│ │ ├── test_create_agent_tool_validation.py
│ │ ├── test_fetch_last_ai_and_tool_messages.py
│ │ ├── test_injected_runtime_create_agent.py
│ │ ├── test_kwargs_tool_runtime_injection.py
│ │ ├── test_react_agent.py
│ │ ├── test_response_format.py
│ │ ├── test_response_format_integration.py
│ │ ├── test_responses.py
│ │ ├── test_responses_spec.py
│ │ ├── test_return_direct_graph.py
│ │ ├── test_return_direct_spec.py
│ │ ├── test_state_schema.py
│ │ ├── test_system_message.py
│ │ └── utils.py
│ ├── chat_models/
│ │ ├── __init__.py
│ │ └── test_chat_models.py
│ ├── conftest.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── test_dependencies.py
│ ├── test_imports.py
│ ├── test_pytest_config.py
│ ├── test_version.py
│ └── tools/
│ ├── __init__.py
│ └── test_imports.py
├── model-profiles/
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain_model_profiles/
│ │ ├── __init__.py
│ │ └── cli.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ └── test_cli.py
├── partners/
│ ├── README.md
│ ├── anthropic/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_anthropic/
│ │ │ ├── __init__.py
│ │ │ ├── _client_utils.py
│ │ │ ├── _compat.py
│ │ │ ├── _version.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── experimental.py
│ │ │ ├── llms.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── anthropic_tools.py
│ │ │ │ ├── bash.py
│ │ │ │ ├── file_search.py
│ │ │ │ └── prompt_caching.py
│ │ │ ├── output_parsers.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ ├── check_version.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── _utils.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ ├── test_anthropic_tools.py
│ │ │ ├── test_bash.py
│ │ │ ├── test_file_search.py
│ │ │ └── test_prompt_caching.py
│ │ ├── test_chat_models.py
│ │ ├── test_client_utils.py
│ │ ├── test_imports.py
│ │ ├── test_llms.py
│ │ ├── test_output_parsers.py
│ │ └── test_standard.py
│ ├── deepseek/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_deepseek/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ └── test_compile.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ └── test_chat_models.py
│ ├── exa/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_exa/
│ │ │ ├── __init__.py
│ │ │ ├── _utilities.py
│ │ │ ├── py.typed
│ │ │ ├── retrievers.py
│ │ │ └── tools.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_find_similar_tool.py
│ │ │ ├── test_retriever.py
│ │ │ └── test_search_tool.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── fireworks/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_fireworks/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings.py
│ │ │ ├── llms.py
│ │ │ ├── py.typed
│ │ │ └── version.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_embeddings_standard.py
│ │ ├── test_imports.py
│ │ ├── test_llms.py
│ │ └── test_standard.py
│ ├── groq/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_groq/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── py.typed
│ │ │ └── version.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── __init__.py
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── fake/
│ │ │ ├── __init__.py
│ │ │ └── callbacks.py
│ │ ├── test_chat_models.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── huggingface/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_huggingface/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ └── huggingface.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── huggingface.py
│ │ │ │ └── huggingface_endpoint.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── huggingface_endpoint.py
│ │ │ │ └── huggingface_pipeline.py
│ │ │ ├── py.typed
│ │ │ ├── tests/
│ │ │ │ ├── __init__.py
│ │ │ │ └── integration_tests/
│ │ │ │ └── __init__.py
│ │ │ └── utils/
│ │ │ └── import_utils.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings_standard.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_huggingface_endpoint.py
│ │ └── test_huggingface_pipeline.py
│ ├── mistralai/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_mistralai/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── nomic/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_nomic/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_compile.py
│ │ │ └── test_embeddings.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── ollama/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_ollama/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── _utils.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ ├── llms.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cassettes/
│ │ │ │ │ └── test_chat_models_standard/
│ │ │ │ │ └── TestChatOllama.test_stream_time.yaml
│ │ │ │ ├── test_chat_models.py
│ │ │ │ ├── test_chat_models_reasoning.py
│ │ │ │ └── test_chat_models_standard.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ └── test_llms.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_auth.py
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_llms.py
│ ├── openai/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_openai/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _client_utils.py
│ │ │ │ ├── _compat.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ └── openai_moderation.py
│ │ │ ├── output_parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tools.py
│ │ │ ├── py.typed
│ │ │ └── tools/
│ │ │ ├── __init__.py
│ │ │ └── custom_tool.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ ├── test_azure_standard.py
│ │ │ │ ├── test_base.py
│ │ │ │ ├── test_base_standard.py
│ │ │ │ ├── test_responses_api.py
│ │ │ │ └── test_responses_standard.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ ├── test_base.py
│ │ │ │ └── test_base_standard.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ └── test_base.py
│ │ │ └── test_compile.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── __snapshots__/
│ │ │ │ ├── test_azure_standard.ambr
│ │ │ │ ├── test_base_standard.ambr
│ │ │ │ └── test_responses_standard.ambr
│ │ │ ├── test_azure.py
│ │ │ ├── test_azure_standard.py
│ │ │ ├── test_base.py
│ │ │ ├── test_base_standard.py
│ │ │ ├── test_imports.py
│ │ │ ├── test_prompt_cache_key.py
│ │ │ ├── test_responses_standard.py
│ │ │ └── test_responses_stream.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── test_azure_embeddings.py
│ │ │ ├── test_azure_standard.py
│ │ │ ├── test_base.py
│ │ │ ├── test_base_standard.py
│ │ │ └── test_imports.py
│ │ ├── fake/
│ │ │ ├── __init__.py
│ │ │ └── callbacks.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── test_azure.py
│ │ │ ├── test_base.py
│ │ │ └── test_imports.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ └── test_openai_moderation_middleware.py
│ │ ├── test_imports.py
│ │ ├── test_load.py
│ │ ├── test_secrets.py
│ │ ├── test_token_counts.py
│ │ └── test_tools.py
│ ├── openrouter/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_openrouter/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── __init__.py
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── perplexity/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_perplexity/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── output_parsers.py
│ │ │ ├── py.typed
│ │ │ ├── retrievers.py
│ │ │ ├── tools.py
│ │ │ └── types.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_chat_models_standard.py
│ │ │ ├── test_compile.py
│ │ │ └── test_search_api.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_chat_models_standard.py
│ │ ├── test_imports.py
│ │ ├── test_output_parsers.py
│ │ ├── test_retrievers.py
│ │ ├── test_secrets.py
│ │ └── test_tools.py
│ ├── qdrant/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_qdrant/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── fastembed_sparse.py
│ │ │ ├── py.typed
│ │ │ ├── qdrant.py
│ │ │ ├── sparse_embeddings.py
│ │ │ └── vectorstores.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── async_api/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_add_texts.py
│ │ │ │ ├── test_from_texts.py
│ │ │ │ ├── test_max_marginal_relevance.py
│ │ │ │ └── test_similarity_search.py
│ │ │ ├── common.py
│ │ │ ├── conftest.py
│ │ │ ├── fastembed/
│ │ │ │ ├── __init__.py
│ │ │ │ └── test_fastembed_sparse.py
│ │ │ ├── fixtures.py
│ │ │ ├── qdrant_vector_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_add_texts.py
│ │ │ │ ├── test_from_existing.py
│ │ │ │ ├── test_from_texts.py
│ │ │ │ ├── test_mmr.py
│ │ │ │ └── test_search.py
│ │ │ ├── test_add_texts.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embedding_interface.py
│ │ │ ├── test_from_existing_collection.py
│ │ │ ├── test_from_texts.py
│ │ │ ├── test_max_marginal_relevance.py
│ │ │ └── test_similarity_search.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_standard.py
│ │ └── test_vectorstores.py
│ └── xai/
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── langchain_xai/
│ │ ├── __init__.py
│ │ ├── chat_models.py
│ │ ├── data/
│ │ │ ├── __init__.py
│ │ │ └── _profiles.py
│ │ └── py.typed
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_chat_models_standard.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── __snapshots__/
│ │ └── test_chat_models_standard.ambr
│ ├── test_chat_models.py
│ ├── test_chat_models_standard.py
│ ├── test_imports.py
│ └── test_secrets.py
├── standard-tests/
│ ├── Makefile
│ ├── README.md
│ ├── langchain_tests/
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── base_store.py
│ │ │ ├── cache.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ ├── indexer.py
│ │ │ ├── retrievers.py
│ │ │ ├── sandboxes.py
│ │ │ ├── tools.py
│ │ │ └── vectorstores.py
│ │ ├── py.typed
│ │ ├── unit_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ └── tools.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ └── pydantic.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── custom_chat_model.py
│ ├── test_basic_retriever.py
│ ├── test_basic_tool.py
│ ├── test_custom_chat_model.py
│ ├── test_decorated_tool.py
│ ├── test_embeddings.py
│ ├── test_in_memory_base_store.py
│ ├── test_in_memory_cache.py
│ └── test_in_memory_vectorstore.py
└── text-splitters/
├── Makefile
├── README.md
├── extended_testing_deps.txt
├── langchain_text_splitters/
│ ├── __init__.py
│ ├── base.py
│ ├── character.py
│ ├── html.py
│ ├── json.py
│ ├── jsx.py
│ ├── konlpy.py
│ ├── latex.py
│ ├── markdown.py
│ ├── nltk.py
│ ├── py.typed
│ ├── python.py
│ ├── sentence_transformers.py
│ ├── spacy.py
│ └── xsl/
│ └── converting_to_header.xslt
├── pyproject.toml
├── scripts/
│ ├── check_imports.py
│ └── lint_imports.sh
└── tests/
├── __init__.py
├── integration_tests/
│ ├── __init__.py
│ ├── test_compile.py
│ ├── test_nlp_text_splitters.py
│ └── test_text_splitter.py
├── test_data/
│ └── test_splitter.xslt
└── unit_tests/
├── __init__.py
├── conftest.py
├── test_html_security.py
└── test_text_splitters.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .devcontainer/README.md
================================================
# Dev container
This project includes a [dev container](https://containers.dev/), which lets you use a container as a full-featured dev environment.
You can use the dev container configuration in this folder to build and run the app without needing to install any of its tools locally! You can use it in [GitHub Codespaces](https://github.com/features/codespaces) or the [VS Code Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers).
## GitHub Codespaces
[](https://codespaces.new/langchain-ai/langchain)
You may use the button above, or follow these steps to open this repo in a Codespace:
1. Click the **Code** drop-down menu at the top of <https://github.com/langchain-ai/langchain>.
1. Click on the **Codespaces** tab.
1. Click **Create codespace on master**.
For more info, check out the [GitHub documentation](https://docs.github.com/en/free-pro-team@latest/github/developing-online-with-codespaces/creating-a-codespace#creating-a-codespace).
## VS Code Dev Containers
[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/langchain-ai/langchain)
> [!NOTE]
> If you click the link above you will open the main repo (`langchain-ai/langchain`) and *not* your local cloned repo. This is fine if you only want to run and test the library, but if you want to contribute you can use the link below and replace with your username and cloned repo name:
```txt
https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/<YOUR_USERNAME>/<YOUR_CLONED_REPO_NAME>
```
Then you will have a local cloned repo where you can contribute and then create pull requests.
If you already have VS Code and Docker installed, you can use the button above to get started. This will use VS Code to automatically install the Dev Containers extension if needed, clone the source code into a container volume, and spin up a dev container for use.
Alternatively you can also follow these steps to open this repo in a container using the VS Code Dev Containers extension:
1. If this is your first time using a development container, please ensure your system meets the pre-reqs (i.e. have Docker installed) in the [getting started steps](https://aka.ms/vscode-remote/containers/getting-started).
2. Open a locally cloned copy of the code:
- Fork and Clone this repository to your local filesystem.
- Press <kbd>F1</kbd> and select the **Dev Containers: Open Folder in Container...** command.
- Select the cloned copy of this folder, wait for the container to start, and try things out!
You can learn more in the [Dev Containers documentation](https://code.visualstudio.com/docs/devcontainers/containers).
## Tips and tricks
- If you are working with the same repository folder in a container and Windows, you'll want consistent line endings (otherwise you may see hundreds of changes in the SCM view). The `.gitattributes` file in the root of this repo will disable line ending conversion and should prevent this. See [tips and tricks](https://code.visualstudio.com/docs/devcontainers/tips-and-tricks#_resolving-git-line-ending-issues-in-containers-resulting-in-many-modified-files) for more info.
- If you'd like to review the contents of the image used in this dev container, you can check it out in the [devcontainers/images](https://github.com/devcontainers/images/tree/main/src/python) repo.
================================================
FILE: .devcontainer/devcontainer.json
================================================
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-docker-compose
{
// Name for the dev container
"name": "langchain",
// Point to a Docker Compose file
"dockerComposeFile": "./docker-compose.yaml",
// Required when using Docker Compose. The name of the service to connect to once running
"service": "langchain",
// The optional 'workspaceFolder' property is the path VS Code should open by default when
// connected. This is typically a file mount in .devcontainer/docker-compose.yaml
"workspaceFolder": "/workspaces/langchain",
"mounts": [
"source=langchain-workspaces,target=/workspaces/langchain,type=volume"
],
// Prevent the container from shutting down
"overrideCommand": true,
// Features to add to the dev container. More info: https://containers.dev/features
"features": {
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/github-cli:1": {}
},
"containerEnv": {
"UV_LINK_MODE": "copy"
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Run commands after the container is created
"postCreateCommand": "cd libs/langchain_v1 && uv sync && echo 'LangChain (Python) dev environment ready!'",
// Configure tool-specific properties.
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.debugpy",
"ms-python.mypy-type-checker",
"ms-python.isort",
"unifiedjs.vscode-mdx",
"davidanson.vscode-markdownlint",
"ms-toolsai.jupyter",
"GitHub.copilot",
"GitHub.copilot-chat"
],
"settings": {
"python.defaultInterpreterPath": "libs/langchain_v1/.venv/bin/python",
"python.formatting.provider": "none",
"[python]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
}
}
}
}
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}
================================================
FILE: .devcontainer/docker-compose.yaml
================================================
version: '3'
services:
langchain:
build:
dockerfile: libs/langchain/dev.Dockerfile
context: ..
networks:
- langchain-network
networks:
langchain-network:
driver: bridge
================================================
FILE: .dockerignore
================================================
# Git
.git
.github
# Python
__pycache__
*.pyc
*.pyo
.venv
.mypy_cache
.pytest_cache
.ruff_cache
*.egg-info
.tox
# IDE
.idea
.vscode
# Worktree
worktree
# Test artifacts
.coverage
htmlcov
coverage.xml
# Build artifacts
dist
build
# Misc
*.log
.DS_Store
================================================
FILE: .editorconfig
================================================
# top-most EditorConfig file
root = true
# All files
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# Python files
[*.py]
indent_style = space
indent_size = 4
max_line_length = 88
# JSON files
[*.json]
indent_style = space
indent_size = 2
# YAML files
[*.{yml,yaml}]
indent_style = space
indent_size = 2
# Markdown files
[*.md]
indent_style = space
indent_size = 2
trim_trailing_whitespace = false
# Configuration files
[*.{toml,ini,cfg}]
indent_style = space
indent_size = 4
# Shell scripts
[*.sh]
indent_style = space
indent_size = 2
# Makefile
[Makefile]
indent_style = tab
indent_size = 4
# Jupyter notebooks
[*.ipynb]
# Jupyter may include trailing whitespace in cell
# outputs that's semantically meaningful
trim_trailing_whitespace = false
================================================
FILE: .gitattributes
================================================
* text=auto eol=lf
*.{cmd,[cC][mM][dD]} text eol=crlf
*.{bat,[bB][aA][tT]} text eol=crlf
================================================
FILE: .github/CODEOWNERS
================================================
/.github/ @ccurme @eyurtsev @mdrxy
/libs/core/ @eyurtsev
/libs/partners/ @ccurme @mdrxy
================================================
FILE: .github/ISSUE_TEMPLATE/bug-report.yml
================================================
name: "\U0001F41B Bug Report"
description: Report a bug in LangChain. To report a security issue, please instead use the security option (below). For questions, please use the LangChain forum (below).
labels: ["bug"]
type: bug
body:
- type: markdown
attributes:
value: |
> **All contributions must be in English.** See the [language policy](https://docs.langchain.com/oss/python/contributing/overview#language-policy).
Thank you for taking the time to file a bug report.
For usage questions, feature requests and general design questions, please use the [LangChain Forum](https://forum.langchain.com/).
Check these before submitting to see if your issue has already been reported, fixed or if there's another way to solve your problem:
* [Documentation](https://docs.langchain.com/oss/python/langchain/overview)
* [API Reference Documentation](https://reference.langchain.com/python/)
* [LangChain ChatBot](https://chat.langchain.com/)
* [GitHub search](https://github.com/langchain-ai/langchain)
* [LangChain Forum](https://forum.langchain.com/)
- type: checkboxes
id: checks
attributes:
label: Checked other resources
description: Please confirm and check all the following options.
options:
- label: This is a bug, not a usage question.
required: true
- label: I added a clear and descriptive title that summarizes this issue.
required: true
- label: I used the GitHub search to find a similar question and didn't find it.
required: true
- label: I am sure that this is a bug in LangChain rather than my code.
required: true
- label: The bug is not resolved by updating to the latest stable version of LangChain (or the specific integration package).
required: true
- label: This is not related to the langchain-community package.
required: true
- label: I posted a self-contained, minimal, reproducible example. A maintainer can copy it and run it AS IS.
required: true
- type: checkboxes
id: package
attributes:
label: Package (Required)
description: |
Which `langchain` package(s) is this bug related to? Select at least one.
Note that if the package you are reporting for is not listed here, it is not in this repository (e.g. `langchain-google-genai` is in [`langchain-ai/langchain-google`](https://github.com/langchain-ai/langchain-google/)).
Please report issues for other packages to their respective repositories.
options:
- label: langchain
- label: langchain-openai
- label: langchain-anthropic
- label: langchain-classic
- label: langchain-core
- label: langchain-model-profiles
- label: langchain-tests
- label: langchain-text-splitters
- label: langchain-chroma
- label: langchain-deepseek
- label: langchain-exa
- label: langchain-fireworks
- label: langchain-groq
- label: langchain-huggingface
- label: langchain-mistralai
- label: langchain-nomic
- label: langchain-ollama
- label: langchain-openrouter
- label: langchain-perplexity
- label: langchain-qdrant
- label: langchain-xai
- label: Other / not sure / general
- type: textarea
id: related
validations:
required: false
attributes:
label: Related Issues / PRs
description: |
If this bug is related to any existing issues or pull requests, please link them here.
placeholder: |
* e.g. #123, #456
- type: textarea
id: reproduction
validations:
required: true
attributes:
label: Reproduction Steps / Example Code (Python)
description: |
Please add a self-contained, [minimal, reproducible, example](https://stackoverflow.com/help/minimal-reproducible-example) with your use case.
If a maintainer can copy it, run it, and see it right away, there's a much higher chance that you'll be able to get help.
**Important!**
* Avoid screenshots, as they are hard to read and (more importantly) don't allow others to copy-and-paste your code.
* Reduce your code to the minimum required to reproduce the issue if possible.
(This will be automatically formatted into code, so no need for backticks.)
render: python
placeholder: |
from langchain_core.runnables import RunnableLambda
def bad_code(inputs) -> int:
raise NotImplementedError('For demo purpose')
chain = RunnableLambda(bad_code)
chain.invoke('Hello!')
- type: textarea
attributes:
label: Error Message and Stack Trace (if applicable)
description: |
If you are reporting an error, please copy and paste the full error message and
stack trace.
(This will be automatically formatted into code, so no need for backticks.)
render: shell
- type: textarea
id: description
attributes:
label: Description
description: |
What is the problem, question, or error?
Write a short description telling what you are doing, what you expect to happen, and what is currently happening.
placeholder: |
* I'm trying to use the `langchain` library to do X.
* I expect to see Y.
* Instead, it does Z.
validations:
required: true
- type: textarea
id: system-info
attributes:
label: System Info
description: |
Please share your system info with us.
Run the following command in your terminal and paste the output here:
`python -m langchain_core.sys_info`
or if you have an existing python interpreter running:
```python
from langchain_core import sys_info
sys_info.print_sys_info()
```
placeholder: |
python -m langchain_core.sys_info
validations:
required: true
================================================
FILE: .github/ISSUE_TEMPLATE/config.yml
================================================
blank_issues_enabled: false
version: 2.1
contact_links:
- name: 💬 LangChain Forum
url: https://forum.langchain.com/
about: General community discussions and support
- name: 📚 LangChain Documentation
url: https://docs.langchain.com/oss/python/langchain/overview
about: View the official LangChain documentation
- name: 📚 API Reference Documentation
url: https://reference.langchain.com/python/
about: View the official LangChain API reference documentation
- name: 📚 Documentation issue
url: https://github.com/langchain-ai/docs/issues/new?template=01-langchain.yml
about: Report an issue related to the LangChain documentation
================================================
FILE: .github/ISSUE_TEMPLATE/feature-request.yml
================================================
name: "✨ Feature Request"
description: Request a new feature or enhancement for LangChain. For questions, please use the LangChain forum (below).
labels: ["feature request"]
type: feature
body:
- type: markdown
attributes:
value: |
> **All contributions must be in English.** See the [language policy](https://docs.langchain.com/oss/python/contributing/overview#language-policy).
Thank you for taking the time to request a new feature.
Use this to request NEW FEATURES or ENHANCEMENTS in LangChain. For bug reports, please use the bug report template. For usage questions and general design questions, please use the [LangChain Forum](https://forum.langchain.com/).
Relevant links to check before filing a feature request to see if your request has already been made or
if there's another way to achieve what you want:
* [Documentation](https://docs.langchain.com/oss/python/langchain/overview),
* [API Reference Documentation](https://reference.langchain.com/python/),
* [LangChain ChatBot](https://chat.langchain.com/)
* [GitHub search](https://github.com/langchain-ai/langchain),
* [LangChain Forum](https://forum.langchain.com/),
**Note:** Do not begin work on a PR unless explicitly assigned to this issue by a maintainer.
- type: checkboxes
id: checks
attributes:
label: Checked other resources
description: Please confirm and check all the following options.
options:
- label: This is a feature request, not a bug report or usage question.
required: true
- label: I added a clear and descriptive title that summarizes the feature request.
required: true
- label: I used the GitHub search to find a similar feature request and didn't find it.
required: true
- label: I checked the LangChain documentation and API reference to see if this feature already exists.
required: true
- label: This is not related to the langchain-community package.
required: true
- type: checkboxes
id: package
attributes:
label: Package (Required)
description: |
Which `langchain` package(s) is this request related to? Select at least one.
Note that if the package you are requesting a feature for is not listed here, it is not in this repository (e.g. `langchain-google-genai` lives in the separate `langchain-ai/langchain-google` repository).
Please submit feature requests for other packages to their respective repositories.
options:
- label: langchain
- label: langchain-openai
- label: langchain-anthropic
- label: langchain-classic
- label: langchain-core
- label: langchain-model-profiles
- label: langchain-tests
- label: langchain-text-splitters
- label: langchain-chroma
- label: langchain-deepseek
- label: langchain-exa
- label: langchain-fireworks
- label: langchain-groq
- label: langchain-huggingface
- label: langchain-mistralai
- label: langchain-nomic
- label: langchain-ollama
- label: langchain-openrouter
- label: langchain-perplexity
- label: langchain-qdrant
- label: langchain-xai
- label: Other / not sure / general
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: Feature Description
description: |
Please provide a clear and concise description of the feature you would like to see added to LangChain.
What specific functionality are you requesting? Be as detailed as possible.
placeholder: |
I would like LangChain to support...
This feature would allow users to...
- type: textarea
id: use-case
validations:
required: true
attributes:
label: Use Case
description: |
Describe the specific use case or problem this feature would solve.
Why do you need this feature? What problem does it solve for you or other users?
placeholder: |
I'm trying to build an application that...
Currently, I have to work around this by...
This feature would help me/users to...
- type: textarea
id: proposed-solution
validations:
required: false
attributes:
label: Proposed Solution
description: |
If you have ideas about how this feature could be implemented, please describe them here.
This is optional but can be helpful for maintainers to understand your vision.
placeholder: |
I think this could be implemented by...
The API could look like...
```python
# Example of how the feature might work
```
- type: textarea
id: alternatives
validations:
required: false
attributes:
label: Alternatives Considered
description: |
Have you considered any alternative solutions or workarounds?
What other approaches have you tried or considered?
placeholder: |
I've tried using...
Alternative approaches I considered:
1. ...
2. ...
But these don't work because...
- type: textarea
id: additional-context
validations:
required: false
attributes:
label: Additional Context
description: |
Add any other context, screenshots, examples, or references that would help explain your feature request.
placeholder: |
Related issues: #...
Similar features in other libraries:
- ...
Additional context or examples:
- ...
================================================
FILE: .github/ISSUE_TEMPLATE/privileged.yml
================================================
name: 🔒 Privileged
description: You are a LangChain maintainer, or were asked directly by a maintainer to create an issue here. If not, check the other options.
body:
- type: markdown
attributes:
value: |
If you are not a LangChain maintainer, employee, or were not asked directly by a maintainer to create an issue, then please start the conversation on the [LangChain Forum](https://forum.langchain.com/) instead.
- type: checkboxes
id: privileged
attributes:
label: Privileged issue
description: Confirm that you are allowed to create an issue here.
options:
- label: I am a LangChain maintainer, or was asked directly by a LangChain maintainer to create an issue here.
required: true
- type: textarea
id: content
attributes:
label: Issue Content
description: Add the content of the issue here.
- type: checkboxes
id: package
attributes:
label: Package (Required)
description: |
Please select package(s) that this issue is related to.
options:
- label: langchain
- label: langchain-openai
- label: langchain-anthropic
- label: langchain-classic
- label: langchain-core
- label: langchain-model-profiles
- label: langchain-tests
- label: langchain-text-splitters
- label: langchain-chroma
- label: langchain-deepseek
- label: langchain-exa
- label: langchain-fireworks
- label: langchain-groq
- label: langchain-huggingface
- label: langchain-mistralai
- label: langchain-nomic
- label: langchain-ollama
- label: langchain-openrouter
- label: langchain-perplexity
- label: langchain-qdrant
- label: langchain-xai
- label: Other / not sure / general
================================================
FILE: .github/ISSUE_TEMPLATE/task.yml
================================================
name: "📋 Task"
description: Create a task for project management and tracking by LangChain maintainers. If you are not a maintainer, please use other templates or the forum.
labels: ["task"]
type: task
body:
- type: markdown
attributes:
value: |
Thanks for creating a task to help organize LangChain development.
This template is for **maintainer tasks** such as project management, development planning, refactoring, documentation updates, and other organizational work.
If you are not a LangChain maintainer or were not asked directly by a maintainer to create a task, then please start the conversation on the [LangChain Forum](https://forum.langchain.com/) instead or use the appropriate bug report or feature request templates on the previous page.
- type: checkboxes
id: maintainer
attributes:
label: Maintainer task
description: Confirm that you are allowed to create a task here.
options:
- label: I am a LangChain maintainer, or was asked directly by a LangChain maintainer to create a task here.
required: true
- type: textarea
id: task-description
attributes:
label: Task Description
description: |
Provide a clear and detailed description of the task.
What needs to be done? Be specific about the scope and requirements.
placeholder: |
This task involves...
The goal is to...
Specific requirements:
- ...
- ...
validations:
required: true
- type: textarea
id: acceptance-criteria
attributes:
label: Acceptance Criteria
description: |
Define the criteria that must be met for this task to be considered complete.
What are the specific deliverables or outcomes expected?
placeholder: |
This task will be complete when:
- [ ] ...
- [ ] ...
- [ ] ...
validations:
required: true
- type: textarea
id: context
attributes:
label: Context and Background
description: |
Provide any relevant context, background information, or links to related issues/PRs.
Why is this task needed? What problem does it solve?
placeholder: |
Background:
- ...
Related issues/PRs:
- #...
Additional context:
- ...
validations:
required: false
- type: textarea
id: dependencies
attributes:
label: Dependencies
description: |
List any dependencies or blockers for this task.
Are there other tasks, issues, or external factors that need to be completed first?
placeholder: |
This task depends on:
- [ ] Issue #...
- [ ] PR #...
- [ ] External dependency: ...
Blocked by:
- ...
validations:
required: false
- type: checkboxes
id: package
attributes:
label: Package (Required)
description: |
Please select package(s) that this task is related to.
options:
- label: langchain
- label: langchain-openai
- label: langchain-anthropic
- label: langchain-classic
- label: langchain-core
- label: langchain-model-profiles
- label: langchain-tests
- label: langchain-text-splitters
- label: langchain-chroma
- label: langchain-deepseek
- label: langchain-exa
- label: langchain-fireworks
- label: langchain-groq
- label: langchain-huggingface
- label: langchain-mistralai
- label: langchain-nomic
- label: langchain-ollama
- label: langchain-openrouter
- label: langchain-perplexity
- label: langchain-qdrant
- label: langchain-xai
- label: Other / not sure / general
================================================
FILE: .github/PULL_REQUEST_TEMPLATE.md
================================================
Fixes #
<!-- Replace everything above this line with a 1-2 sentence description of your change. Keep the "Fixes #xx" keyword and update the issue number. -->
Read the full contributing guidelines: https://docs.langchain.com/oss/python/contributing/overview
> **All contributions must be in English.** See the [language policy](https://docs.langchain.com/oss/python/contributing/overview#language-policy).
If you paste a large clearly AI generated description here your PR may be IGNORED or CLOSED!
Thank you for contributing to LangChain! Follow these steps to have your pull request considered as ready for review.
1. PR title: Should follow the format: TYPE(SCOPE): DESCRIPTION
- Examples:
- fix(anthropic): resolve flag parsing error
- feat(core): add multi-tenant support
- test(openai): update API usage tests
- Allowed TYPE and SCOPE values: https://github.com/langchain-ai/langchain/blob/master/.github/workflows/pr_lint.yml#L15-L33
2. PR description:
- Write 1-2 sentences summarizing the change.
- The `Fixes #xx` line at the top is **required** for external contributions — update the issue number and keep the keyword. This links your PR to the approved issue and auto-closes it on merge.
- If there are any breaking changes, please clearly describe them.
- If this PR depends on another PR being merged first, please include "Depends on #PR_NUMBER" in the description.
3. Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified.
- We will not consider a PR unless these three are passing in CI.
4. How did you verify your code works?
Additional guidelines:
- All external PRs must link to an issue or discussion where a solution has been approved by a maintainer, and you must be assigned to that issue. PRs without prior approval will be closed.
- PRs should not touch more than one package unless absolutely necessary.
- Do not update the `uv.lock` files or add dependencies to `pyproject.toml` files (even optional ones) unless you have explicit permission to do so by a maintainer.
## Social handles (optional)
<!-- If you'd like a shoutout on release, add your socials below -->
Twitter: @
LinkedIn: https://linkedin.com/in/
================================================
FILE: .github/actions/uv_setup/action.yml
================================================
# Helper to set up Python and uv with caching
name: uv-install
description: Set up Python and uv with caching
inputs:
python-version:
description: Python version, supporting MAJOR.MINOR only
required: true
enable-cache:
description: Enable caching for uv dependencies
required: false
default: "true"
cache-suffix:
description: Custom cache key suffix for cache invalidation
required: false
default: ""
working-directory:
description: Working directory for cache glob scoping
required: false
default: "**"
env:
UV_VERSION: "0.5.25"
runs:
using: composite
steps:
- name: Install uv and set the python version
uses: astral-sh/setup-uv@0ca8f610542aa7f4acaf39e65cf4eb3c35091883 # v7
with:
version: ${{ env.UV_VERSION }}
python-version: ${{ inputs.python-version }}
enable-cache: ${{ inputs.enable-cache }}
cache-dependency-glob: |
${{ inputs.working-directory }}/pyproject.toml
${{ inputs.working-directory }}/uv.lock
${{ inputs.working-directory }}/requirements*.txt
cache-suffix: ${{ inputs.cache-suffix }}
================================================
FILE: .github/dependabot.yml
================================================
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
# and
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
groups:
minor-and-patch:
patterns:
- "*"
update-types:
- "minor"
- "patch"
major:
patterns:
- "*"
update-types:
- "major"
- package-ecosystem: "uv"
directories:
- "/libs/core/"
- "/libs/langchain/"
- "/libs/langchain_v1/"
schedule:
interval: "monthly"
groups:
minor-and-patch:
patterns:
- "*"
update-types:
- "minor"
- "patch"
major:
patterns:
- "*"
update-types:
- "major"
- package-ecosystem: "uv"
directories:
- "/libs/partners/anthropic/"
- "/libs/partners/chroma/"
- "/libs/partners/deepseek/"
- "/libs/partners/exa/"
- "/libs/partners/fireworks/"
- "/libs/partners/groq/"
- "/libs/partners/huggingface/"
- "/libs/partners/mistralai/"
- "/libs/partners/nomic/"
- "/libs/partners/ollama/"
- "/libs/partners/openai/"
- "/libs/partners/openrouter/"
- "/libs/partners/perplexity/"
- "/libs/partners/qdrant/"
- "/libs/partners/xai/"
schedule:
interval: "monthly"
groups:
minor-and-patch:
patterns:
- "*"
update-types:
- "minor"
- "patch"
major:
patterns:
- "*"
update-types:
- "major"
- package-ecosystem: "uv"
directories:
- "/libs/text-splitters/"
- "/libs/standard-tests/"
- "/libs/model-profiles/"
schedule:
interval: "monthly"
groups:
minor-and-patch:
patterns:
- "*"
update-types:
- "minor"
- "patch"
major:
patterns:
- "*"
update-types:
- "major"
================================================
FILE: .github/scripts/check_diff.py
================================================
"""Analyze git diffs to determine which directories need to be tested.
Intelligently determines which LangChain packages and directories need to be tested,
linted, or built based on the changes. Handles dependency relationships between
packages, maps file changes to appropriate CI job configurations, and outputs JSON
configurations for GitHub Actions.
- Maps changed files to affected package directories (libs/core, libs/partners/*, etc.)
- Builds dependency graph to include dependent packages when core components change
- Generates test matrix configurations with appropriate Python versions
- Handles special cases for Pydantic version testing and performance benchmarks
Used as part of the check_diffs workflow.
"""
import glob
import json
import os
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Set
import tomllib
from get_min_versions import get_min_version_from_toml
from packaging.requirements import Requirement
# First-party packages living directly in this monorepo.
# NOTE: order matters — when a file in one of these dirs changes, that dir and
# every dir listed AFTER it is scheduled for extended tests (see __main__).
LANGCHAIN_DIRS = [
    "libs/core",
    "libs/text-splitters",
    "libs/langchain",
    "libs/langchain_v1",
    "libs/model-profiles",
]

# When set to True, we are ignoring core dependents
# in order to be able to get CI to pass for each individual
# package that depends on core
# e.g. if you touch core, we don't then add textsplitters/etc to CI
IGNORE_CORE_DEPENDENTS = False

# ignored partners are removed from dependents
# but still run if directly edited
IGNORED_PARTNERS = [
    # remove huggingface from dependents because of CI instability
    # specifically in huggingface jobs
    "huggingface",
]
def all_package_dirs() -> Set[str]:
    """Discover every package directory under ``libs/``.

    A package is any directory containing a ``pyproject.toml``; the
    ``libs/standard-tests`` package is deliberately excluded.
    """
    package_dirs: Set[str] = set()
    for toml_path in glob.glob("./libs/**/pyproject.toml", recursive=True):
        if "libs/standard-tests" in toml_path:
            continue
        # Drop the trailing "pyproject.toml" component, then the "./" prefix.
        parent_dir = "/".join(toml_path.split("/")[:-1])
        package_dirs.add(parent_dir.lstrip("./"))
    return package_dirs
def dependents_graph() -> dict:
"""Construct a mapping of package -> dependents
Done such that we can run tests on all dependents of a package when a change is made.
"""
dependents = defaultdict(set)
for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
if "template" in path:
continue
# load regular and test deps from pyproject.toml
with open(path, "rb") as f:
pyproject = tomllib.load(f)
pkg_dir = "libs" + "/".join(path.split("libs")[1].split("/")[:-1])
for dep in [
*pyproject["project"]["dependencies"],
*pyproject["dependency-groups"]["test"],
]:
requirement = Requirement(dep)
package_name = requirement.name
if "langchain" in dep:
dependents[package_name].add(pkg_dir)
continue
# load extended deps from extended_testing_deps.txt
package_path = Path(path).parent
extended_requirement_path = package_path / "extended_testing_deps.txt"
if extended_requirement_path.exists():
with open(extended_requirement_path, "r") as f:
extended_deps = f.read().splitlines()
for depline in extended_deps:
if depline.startswith("-e "):
# editable dependency
assert depline.startswith("-e ../partners/"), (
"Extended test deps should only editable install partner packages"
)
partner = depline.split("partners/")[1]
dep = f"langchain-{partner}"
else:
dep = depline.split("==")[0]
if "langchain" in dep:
dependents[dep].add(pkg_dir)
for k in dependents:
for partner in IGNORED_PARTNERS:
if f"libs/partners/{partner}" in dependents[k]:
dependents[k].remove(f"libs/partners/{partner}")
return dependents
def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:
    """Expand changed directories with every directory that depends on them.

    Core is special-cased: it has far too many dependents, so only the core
    directory itself is included (cascading is handled elsewhere).
    """
    expanded: Set[str] = set()
    for changed_dir in dirs_to_eval:
        expanded.add(changed_dir)
        if "core" in changed_dir:
            # handle core manually because it has so many dependents
            continue
        package_name = "langchain-" + changed_dir.split("/")[-1]
        expanded.update(dependents[package_name])
    return list(expanded)
def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
if job == "test-pydantic":
return _get_pydantic_test_configs(dir_)
if job == "codspeed":
# CPU simulation (<1% variance, Valgrind-based) is the default.
# Partners with heavy SDK inits use walltime instead to keep CI fast.
CODSPEED_WALLTIME_DIRS = {
"libs/core",
"libs/partners/fireworks", # ~328s under simulation
"libs/partners/openai", # 6 benchmarks, ~6 min under simulation
}
mode = "walltime" if dir_ in CODSPEED_WALLTIME_DIRS else "simulation"
return [
{
"working-directory": dir_,
"python-version": "3.13",
"codspeed-mode": mode,
}
]
if dir_ == "libs/core":
py_versions = ["3.10", "3.11", "3.12", "3.13", "3.14"]
else:
py_versions = ["3.10", "3.14"]
return [{"working-directory": dir_, "python-version": py_v} for py_v in py_versions]
def _get_pydantic_test_configs(
    dir_: str, *, python_version: str = "3.12"
) -> List[Dict[str, str]]:
    """Build matrix entries covering the pydantic 2.x minors shared by core and ``dir_``.

    The upper bound is the lower of the pydantic minors locked in core's and
    the target package's ``uv.lock``; the lower bound is the higher of the two
    packages' declared minimum pydantic minors.
    """
    # Pydantic minor pinned in core's lockfile.
    with open("./libs/core/uv.lock", "rb") as f:
        core_uv_lock_data = tomllib.load(f)
    for package in core_uv_lock_data["package"]:
        if package["name"] == "pydantic":
            core_max_pydantic_minor = package["version"].split(".")[1]
            break

    # Pydantic minor pinned in the target package's lockfile.
    with open(f"./{dir_}/uv.lock", "rb") as f:
        dir_uv_lock_data = tomllib.load(f)
    for package in dir_uv_lock_data["package"]:
        if package["name"] == "pydantic":
            dir_max_pydantic_minor = package["version"].split(".")[1]
            break

    def _minor_of(version: str) -> str:
        # "2.7.4" -> "7"; a version without a dot falls back to minor "0".
        return version.split(".")[1] if "." in version else "0"

    core_min_pydantic_version = get_min_version_from_toml(
        "./libs/core/pyproject.toml", "release", python_version, include=["pydantic"]
    )["pydantic"]
    core_min_pydantic_minor = _minor_of(core_min_pydantic_version)

    dir_min_pydantic_version = get_min_version_from_toml(
        f"./{dir_}/pyproject.toml", "release", python_version, include=["pydantic"]
    ).get("pydantic", "0.0.0")
    dir_min_pydantic_minor = _minor_of(dir_min_pydantic_version)

    min_pydantic_minor = max(int(dir_min_pydantic_minor), int(core_min_pydantic_minor))
    max_pydantic_minor = min(int(dir_max_pydantic_minor), int(core_max_pydantic_minor))

    return [
        {
            "working-directory": dir_,
            "pydantic-version": f"2.{minor}.0",
            "python-version": python_version,
        }
        for minor in range(min_pydantic_minor, max_pydantic_minor + 1)
    ]
def _get_configs_for_multi_dirs(
job: str, dirs_to_run: Dict[str, Set[str]], dependents: dict
) -> List[Dict[str, str]]:
if job == "lint":
dirs = add_dependents(
dirs_to_run["lint"] | dirs_to_run["test"] | dirs_to_run["extended-test"],
dependents,
)
elif job in ["test", "compile-integration-tests", "dependencies", "test-pydantic"]:
dirs = add_dependents(
dirs_to_run["test"] | dirs_to_run["extended-test"], dependents
)
elif job == "extended-tests":
dirs = list(dirs_to_run["extended-test"])
elif job == "codspeed":
dirs = list(dirs_to_run["codspeed"])
else:
raise ValueError(f"Unknown job: {job}")
return [
config for dir_ in dirs for config in _get_configs_for_single_dir(job, dir_)
]
if __name__ == "__main__":
    # Invoked by the check_diffs workflow with the list of changed files as argv.
    files = sys.argv[1:]

    # Buckets of package directories, one per CI job family.
    dirs_to_run: Dict[str, set] = {
        "lint": set(),
        "test": set(),
        "extended-test": set(),
        "codspeed": set(),
    }
    docs_edited = False

    if len(files) >= 300:
        # max diff length is 300 files - there are likely files missing
        dirs_to_run["lint"] = all_package_dirs()
        dirs_to_run["test"] = all_package_dirs()
        dirs_to_run["extended-test"] = set(LANGCHAIN_DIRS)

    for file in files:
        if any(
            file.startswith(dir_)
            for dir_ in (
                ".github/workflows",
                ".github/tools",
                ".github/actions",
                ".github/scripts/check_diff.py",
            )
        ):
            # Infrastructure changes (workflows, actions, CI scripts) trigger tests on
            # all core packages as a safety measure. This ensures that changes to CI/CD
            # infrastructure don't inadvertently break package testing, even if the change
            # appears unrelated (e.g., documentation build workflows). This is intentionally
            # conservative to catch unexpected side effects from workflow modifications.
            #
            # Example: A PR modifying .github/workflows/api_doc_build.yml will trigger
            # lint/test jobs for libs/core, libs/text-splitters, libs/langchain, and
            # libs/langchain_v1, even though the workflow may only affect documentation.
            dirs_to_run["extended-test"].update(LANGCHAIN_DIRS)

        # Any change under libs/core also schedules the core benchmark job.
        if file.startswith("libs/core"):
            dirs_to_run["codspeed"].add("libs/core")

        if any(file.startswith(dir_) for dir_ in LANGCHAIN_DIRS):
            # add that dir and all dirs after in LANGCHAIN_DIRS
            # for extended testing
            found = False
            for dir_ in LANGCHAIN_DIRS:
                if dir_ == "libs/core" and IGNORE_CORE_DEPENDENTS:
                    dirs_to_run["extended-test"].add(dir_)
                    continue
                if file.startswith(dir_):
                    found = True
                if found:
                    dirs_to_run["extended-test"].add(dir_)
        elif file.startswith("libs/standard-tests"):
            # TODO: update to include all packages that rely on standard-tests (all partner packages)
            # Note: won't run on external repo partners
            dirs_to_run["lint"].add("libs/standard-tests")
            dirs_to_run["test"].add("libs/standard-tests")
            dirs_to_run["test"].add("libs/partners/mistralai")
            dirs_to_run["test"].add("libs/partners/openai")
            dirs_to_run["test"].add("libs/partners/anthropic")
            dirs_to_run["test"].add("libs/partners/fireworks")
            dirs_to_run["test"].add("libs/partners/groq")
        elif file.startswith("libs/partners"):
            partner_dir = file.split("/")[2]
            # Only schedule tests when the partner dir still exists and holds
            # more than a tombstone README (non-hidden entries != ["README.md"]).
            if os.path.isdir(f"libs/partners/{partner_dir}") and [
                filename
                for filename in os.listdir(f"libs/partners/{partner_dir}")
                if not filename.startswith(".")
            ] != ["README.md"]:
                dirs_to_run["test"].add(f"libs/partners/{partner_dir}")
                # Skip codspeed for partners without benchmarks or in IGNORED_PARTNERS
                if partner_dir not in IGNORED_PARTNERS:
                    dirs_to_run["codspeed"].add(f"libs/partners/{partner_dir}")
            # Skip if the directory was deleted or is just a tombstone readme
        elif file.startswith("libs/"):
            # Check if this is a root-level file in libs/ (e.g., libs/README.md)
            file_parts = file.split("/")
            if len(file_parts) == 2:
                # Root-level file in libs/, skip it (no tests needed)
                continue
            raise ValueError(
                f"Unknown lib: {file}. check_diff.py likely needs "
                "an update for this new library!"
            )
        elif file in [
            "pyproject.toml",
            "uv.lock",
        ]:  # root uv files
            # NOTE(review): docs_edited is set but never read below — appears
            # vestigial; confirm before removing.
            docs_edited = True

    dependents = dependents_graph()

    # we now have dirs_by_job
    # todo: clean this up
    map_job_to_configs = {
        job: _get_configs_for_multi_dirs(job, dirs_to_run, dependents)
        for job in [
            "lint",
            "test",
            "extended-tests",
            "compile-integration-tests",
            "dependencies",
            "test-pydantic",
            "codspeed",
        ]
    }

    # Emit one "key=<json>" line per job; the workflow captures this output.
    for key, value in map_job_to_configs.items():
        json_output = json.dumps(value)
        print(f"{key}={json_output}")
================================================
FILE: .github/scripts/check_prerelease_dependencies.py
================================================
"""Check that no dependencies allow prereleases unless we're releasing a prerelease."""
import sys
import tomllib
if __name__ == "__main__":
# Get the TOML file path from the command line argument
toml_file = sys.argv[1]
with open(toml_file, "rb") as file:
toml_data = tomllib.load(file)
# See if we're releasing an rc or dev version
version = toml_data["project"]["version"]
releasing_rc = "rc" in version or "dev" in version
# If not, iterate through dependencies and make sure none allow prereleases
if not releasing_rc:
dependencies = toml_data["project"]["dependencies"]
for dep_version in dependencies:
dep_version_string = (
dep_version["version"] if isinstance(dep_version, dict) else dep_version
)
if "rc" in dep_version_string:
raise ValueError(
f"Dependency {dep_version} has a prerelease version. Please remove this."
)
if isinstance(dep_version, dict) and dep_version.get(
"allow-prereleases", False
):
raise ValueError(
f"Dependency {dep_version} has allow-prereleases set to true. Please remove this."
)
================================================
FILE: .github/scripts/get_min_versions.py
================================================
"""Get minimum versions of dependencies from a pyproject.toml file."""
import sys
from collections import defaultdict
if sys.version_info >= (3, 11):
import tomllib
else:
# For Python 3.10 and below, which doesnt have stdlib tomllib
import tomli as tomllib
import re
from typing import List
import requests
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version, parse
MIN_VERSION_LIBS = [
"langchain-core",
"langchain",
"langchain-text-splitters",
"numpy",
"SQLAlchemy",
]
# some libs only get checked on release because of simultaneous changes in
# multiple libs
SKIP_IF_PULL_REQUEST = [
"langchain-core",
"langchain-text-splitters",
"langchain",
]
def get_pypi_versions(package_name: str) -> List[str]:
    """Fetch all available versions for a package from PyPI.

    Args:
        package_name: Name of the package

    Returns:
        List of all available versions

    Raises:
        requests.exceptions.RequestException: If PyPI API request fails
        KeyError: If package not found or response format unexpected
    """
    response = requests.get(
        f"https://pypi.org/pypi/{package_name}/json", timeout=10.0
    )
    response.raise_for_status()
    payload = response.json()
    # The "releases" mapping is keyed by version string.
    return list(payload["releases"].keys())
def get_minimum_version(package_name: str, spec_string: str) -> str | None:
    """Find the minimum published version that satisfies the given constraints.

    Args:
        package_name: Name of the package
        spec_string: Version specification string (e.g., ">=0.2.43,<0.4.0,!=0.3.0")

    Returns:
        Minimum compatible version or None if no compatible version found
    """
    # Translate poetry-style caret constraints into PEP 440 ranges.
    # ^0.0.z pins exactly 0.0.z (can be anywhere in constraint string).
    spec_string = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", spec_string)
    # ^0.y.z becomes >=0.y.z,<0.(y+1) for y in 1..9.
    for minor in range(1, 10):
        spec_string = re.sub(
            rf"\^0\.{minor}\.(\d+)", rf">=0.{minor}.\1,<0.{minor + 1}", spec_string
        )
    # ^x.y.z becomes >=x.y.z,<(x+1) for x in 1..9.
    for major in range(1, 10):
        spec_string = re.sub(
            rf"\^{major}\.(\d+)\.(\d+)",
            rf">={major}.\1.\2,<{major + 1}",
            spec_string,
        )

    spec_set = SpecifierSet(spec_string)
    compatible = []
    for candidate in get_pypi_versions(package_name):
        try:
            parsed = parse(candidate)
            if spec_set.contains(parsed):
                compatible.append(parsed)
        except ValueError:
            # Skip versions PyPI reports that don't parse as PEP 440.
            continue
    return str(min(compatible)) if compatible else None
def _check_python_version_from_requirement(
    requirement: Requirement, python_version: str
) -> bool:
    """Return True when the requirement's environment marker allows ``python_version``."""
    if not requirement.marker:
        # No marker: the requirement applies unconditionally.
        return True
    marker_text = str(requirement.marker)
    if "python_version" not in marker_text and "python_full_version" not in marker_text:
        # Marker exists but does not restrict by Python version.
        return True
    # Strip everything except digits and comparison punctuation to recover a
    # bare specifier string like ">=3.9,<4.0" from the marker text.
    allowed_chars = (".", "<", ">", "=", ",")
    version_spec = "".join(
        char for char in marker_text if char.isdigit() or char in allowed_chars
    )
    return check_python_version(python_version, version_spec)
def get_min_version_from_toml(
    toml_path: str,
    versions_for: str,
    python_version: str,
    *,
    include: list | None = None,
):
    """Resolve minimum published versions for selected dependencies of a package.

    Args:
        toml_path: Path to the ``pyproject.toml`` to inspect.
        versions_for: Either ``"release"`` or ``"pull_request"``; in the latter
            case libraries in ``SKIP_IF_PULL_REQUEST`` are skipped.
        python_version: Python version used to evaluate environment markers.
        include: Optional extra libraries to check in addition to
            ``MIN_VERSION_LIBS``; when given, also restricts output to it.

    Returns:
        Mapping of library name to its minimum compatible published version
        (value may be None if no published version satisfies the constraints).
    """
    # Parse the TOML file
    with open(toml_path, "rb") as file:
        toml_data = tomllib.load(file)

    # Group requirement entries by distribution name: a package can appear
    # several times with different environment markers.
    dependencies = defaultdict(list)
    for dep in toml_data["project"]["dependencies"]:
        requirement = Requirement(dep)
        dependencies[requirement.name].append(requirement)

    # Initialize a dictionary to store the minimum versions
    min_versions = {}

    for lib in set(MIN_VERSION_LIBS + (include or [])):
        if versions_for == "pull_request" and lib in SKIP_IF_PULL_REQUEST:
            # some libs only get checked on release because of simultaneous
            # changes in multiple libs
            continue
        if lib not in dependencies:
            continue
        if include and lib not in include:
            continue

        # Pick the first requirement whose marker applies to python_version.
        version_string = None
        for requirement in dependencies[lib]:
            if _check_python_version_from_requirement(requirement, python_version):
                version_string = str(requirement.specifier)
                break
        if version_string is None:
            # No requirement applies to this Python version. Previously this
            # fell through with version_string unbound and raised NameError;
            # skip the library instead.
            continue

        # Resolve the minimum published version satisfying version_string.
        min_versions[lib] = get_minimum_version(lib, version_string)

    return min_versions
def check_python_version(version_string, constraint_string):
    """Check if the given Python version matches the given constraints.

    Args:
        version_string: A string representing the Python version (e.g. "3.8.5").
        constraint_string: A string representing the package's Python version
            constraints (e.g. ">=3.6, <4.0"). Poetry-style caret constraints
            (``^x.y.z``) anywhere in the string are rewritten to PEP 440
            specifiers first.

    Returns:
        True if the version matches the constraints; False on mismatch or
        when the constraint string cannot be parsed.
    """

    def _expand_caret(match):
        # Translate one ^x.y.z occurrence to its PEP 440 equivalent.
        major, minor, patch = (int(g) for g in match.groups())
        if major == 0 and minor == 0:
            # ^0.0.z -> 0.0.z (kept as-is, matching the historical rewrite)
            return f"0.0.{patch}"
        if major == 0:
            # ^0.y.z -> >=0.y.z,<0.(y+1).0
            return f">=0.{minor}.{patch},<0.{minor + 1}.0"
        # ^x.y.z -> >=x.y.z,<(x+1).0.0
        return f">={major}.{minor}.{patch},<{major + 1}.0.0"

    # GENERALIZED: the previous per-digit loops only handled single-digit
    # components (ranges 1..9) and only ^x.0.z for major carets, so inputs
    # like ^1.2.3 or ^0.10.1 fell through unrewritten and failed to parse.
    constraint_string = re.sub(
        r"\^(\d+)\.(\d+)\.(\d+)", _expand_caret, constraint_string
    )
    try:
        version = Version(version_string)
        constraints = SpecifierSet(constraint_string)
        return version in constraints
    except Exception as e:
        print(f"Error: {e}")
        return False
if __name__ == "__main__":
    # CLI: get_min_versions.py <pyproject.toml> <release|pull_request> <python_version>
    toml_file = sys.argv[1]
    versions_for = sys.argv[2]
    python_version = sys.argv[3]
    assert versions_for in ("release", "pull_request")
    # Resolve minimum versions and emit them as space-separated pins.
    min_versions = get_min_version_from_toml(toml_file, versions_for, python_version)
    print(" ".join(f"{lib}=={version}" for lib, version in min_versions.items()))
================================================
FILE: .github/scripts/pr-labeler-config.json
================================================
{
"trustedThreshold": 5,
"labelColor": "b76e79",
"sizeThresholds": [
{ "label": "size: XS", "max": 50 },
{ "label": "size: S", "max": 200 },
{ "label": "size: M", "max": 500 },
{ "label": "size: L", "max": 1000 },
{ "label": "size: XL" }
],
"excludedFiles": ["uv.lock"],
"excludedPaths": ["docs/"],
"typeToLabel": {
"feat": "feature",
"fix": "fix",
"docs": "documentation",
"style": "linting",
"refactor": "refactor",
"perf": "performance",
"test": "tests",
"build": "infra",
"ci": "infra",
"chore": "infra",
"revert": "revert",
"release": "release",
"hotfix": "hotfix",
"breaking": "breaking"
},
"scopeToLabel": {
"core": "core",
"langchain": "langchain",
"langchain-classic": "langchain-classic",
"model-profiles": "model-profiles",
"standard-tests": "standard-tests",
"text-splitters": "text-splitters",
"anthropic": "anthropic",
"chroma": "chroma",
"deepseek": "deepseek",
"exa": "exa",
"fireworks": "fireworks",
"groq": "groq",
"huggingface": "huggingface",
"mistralai": "mistralai",
"nomic": "nomic",
"ollama": "ollama",
"openai": "openai",
"openrouter": "openrouter",
"perplexity": "perplexity",
"qdrant": "qdrant",
"xai": "xai",
"deps": "dependencies",
"docs": "documentation",
"infra": "infra"
},
"fileRules": [
{ "label": "core", "prefix": "libs/core/", "skipExcludedFiles": true },
{ "label": "langchain-classic", "prefix": "libs/langchain/", "skipExcludedFiles": true },
{ "label": "langchain", "prefix": "libs/langchain_v1/", "skipExcludedFiles": true },
{ "label": "standard-tests", "prefix": "libs/standard-tests/", "skipExcludedFiles": true },
{ "label": "model-profiles", "prefix": "libs/model-profiles/", "skipExcludedFiles": true },
{ "label": "text-splitters", "prefix": "libs/text-splitters/", "skipExcludedFiles": true },
{ "label": "integration", "prefix": "libs/partners/", "skipExcludedFiles": true },
{ "label": "anthropic", "prefix": "libs/partners/anthropic/", "skipExcludedFiles": true },
{ "label": "chroma", "prefix": "libs/partners/chroma/", "skipExcludedFiles": true },
{ "label": "deepseek", "prefix": "libs/partners/deepseek/", "skipExcludedFiles": true },
{ "label": "exa", "prefix": "libs/partners/exa/", "skipExcludedFiles": true },
{ "label": "fireworks", "prefix": "libs/partners/fireworks/", "skipExcludedFiles": true },
{ "label": "groq", "prefix": "libs/partners/groq/", "skipExcludedFiles": true },
{ "label": "huggingface", "prefix": "libs/partners/huggingface/", "skipExcludedFiles": true },
{ "label": "mistralai", "prefix": "libs/partners/mistralai/", "skipExcludedFiles": true },
{ "label": "nomic", "prefix": "libs/partners/nomic/", "skipExcludedFiles": true },
{ "label": "ollama", "prefix": "libs/partners/ollama/", "skipExcludedFiles": true },
{ "label": "openai", "prefix": "libs/partners/openai/", "skipExcludedFiles": true },
{ "label": "openrouter", "prefix": "libs/partners/openrouter/", "skipExcludedFiles": true },
{ "label": "perplexity", "prefix": "libs/partners/perplexity/", "skipExcludedFiles": true },
{ "label": "qdrant", "prefix": "libs/partners/qdrant/", "skipExcludedFiles": true },
{ "label": "xai", "prefix": "libs/partners/xai/", "skipExcludedFiles": true },
{ "label": "github_actions", "prefix": ".github/workflows/" },
{ "label": "github_actions", "prefix": ".github/actions/" },
{ "label": "dependencies", "suffix": "pyproject.toml" },
{ "label": "dependencies", "exact": "uv.lock" },
{ "label": "dependencies", "pattern": "(?:^|/)requirements[^/]*\\.txt$" }
]
}
================================================
FILE: .github/scripts/pr-labeler.js
================================================
// Shared helpers for pr_labeler.yml and tag-external-issues.yml.
//
// Usage from actions/github-script (requires actions/checkout first):
// const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
const fs = require('fs');
const path = require('path');
// Read and validate pr-labeler-config.json located next to this script.
// Throws with a distinct message for each failure mode (read / parse / shape).
function loadConfig() {
  const configPath = path.join(__dirname, 'pr-labeler-config.json');

  let raw;
  try {
    raw = fs.readFileSync(configPath, 'utf8');
  } catch (e) {
    throw new Error(`Failed to read ${configPath}: ${e.message}`);
  }

  let config;
  try {
    config = JSON.parse(raw);
  } catch (e) {
    throw new Error(`Failed to parse pr-labeler-config.json: ${e.message}`);
  }

  // Fail fast when a required key is absent rather than erroring later.
  const required = [
    'labelColor', 'sizeThresholds', 'fileRules',
    'typeToLabel', 'scopeToLabel', 'trustedThreshold',
    'excludedFiles', 'excludedPaths',
  ];
  const missing = [];
  for (const key of required) {
    if (!(key in config)) missing.push(key);
  }
  if (missing.length > 0) {
    throw new Error(`pr-labeler-config.json missing required keys: ${missing.join(', ')}`);
  }
  return config;
}
/**
 * Build the helper bundle shared by pr_labeler.yml and tag-external-issues.yml.
 *
 * @param {object} github - Octokit client (from actions/github-script).
 * @param {string} owner - Repository owner.
 * @param {string} repo - Repository name.
 * @param {object} config - Parsed pr-labeler-config.json (see loadConfig()).
 * @param {object} core - actions/core logger; required.
 * @returns {object} Helper functions plus derived label lists and constants.
 */
function init(github, owner, repo, config, core) {
  if (!core) {
    throw new Error('init() requires a `core` parameter (e.g., from actions/github-script)');
  }
  const {
    trustedThreshold,
    labelColor,
    sizeThresholds,
    scopeToLabel,
    typeToLabel,
    fileRules: fileRulesDef,
    excludedFiles,
    excludedPaths,
  } = config;
  const sizeLabels = sizeThresholds.map(t => t.label);
  const allTypeLabels = [...new Set(Object.values(typeToLabel))];
  const tierLabels = ['new-contributor', 'trusted-contributor'];

  // ── Label management ──────────────────────────────────────────────
  // Ensure the label exists in the repo (get → on 404, create).
  async function ensureLabel(name, color = labelColor) {
    try {
      await github.rest.issues.getLabel({ owner, repo, name });
    } catch (e) {
      if (e.status !== 404) throw e;
      try {
        await github.rest.issues.createLabel({ owner, repo, name, color });
      } catch (createErr) {
        // 422 = label created by a concurrent run between our get and create
        if (createErr.status !== 422) throw createErr;
        core.info(`Label "${name}" creation returned 422 (likely already exists)`);
      }
    }
  }

  // ── Size calculation ──────────────────────────────────────────────
  // Map a changed-line count onto the first threshold it fits under.
  function getSizeLabel(totalChanged) {
    for (const t of sizeThresholds) {
      if (t.max != null && totalChanged < t.max) return t.label;
    }
    // Last entry has no max — it's the catch-all
    return sizeThresholds[sizeThresholds.length - 1].label;
  }

  // Sum additions + deletions over files, skipping excluded files and paths.
  function computeSize(files) {
    const excluded = new Set(excludedFiles);
    const totalChanged = files.reduce((sum, f) => {
      const p = f.filename ?? '';
      const base = p.split('/').pop();
      if (excluded.has(base)) return sum;
      for (const prefix of excludedPaths) {
        if (p.startsWith(prefix)) return sum;
      }
      return sum + (f.additions ?? 0) + (f.deletions ?? 0);
    }, 0);
    return { totalChanged, sizeLabel: getSizeLabel(totalChanged) };
  }

  // ── File-based labels ─────────────────────────────────────────────
  // Compile fileRules config entries into { label, test, skipExcluded };
  // exactly one matcher (prefix / suffix / exact / pattern) must be set.
  function buildFileRules() {
    return fileRulesDef.map((rule, i) => {
      let test;
      if (rule.prefix) test = p => p.startsWith(rule.prefix);
      else if (rule.suffix) test = p => p.endsWith(rule.suffix);
      else if (rule.exact) test = p => p === rule.exact;
      else if (rule.pattern) {
        const re = new RegExp(rule.pattern);
        test = p => re.test(p);
      } else {
        throw new Error(
          `fileRules[${i}] (label: "${rule.label}") has no recognized matcher ` +
          `(expected one of: prefix, suffix, exact, pattern)`
        );
      }
      return { label: rule.label, test, skipExcluded: !!rule.skipExcludedFiles };
    });
  }

  // Return the set of labels whose rule matches at least one changed file.
  function matchFileLabels(files, fileRules) {
    const rules = fileRules || buildFileRules();
    const excluded = new Set(excludedFiles);
    const labels = new Set();
    for (const rule of rules) {
      // skipExcluded: ignore files whose basename is in the top-level
      // "excludedFiles" list (e.g. uv.lock) so lockfile-only changes
      // don't trigger package labels.
      const candidates = rule.skipExcluded
        ? files.filter(f => !excluded.has((f.filename ?? '').split('/').pop()))
        : files;
      if (candidates.some(f => rule.test(f.filename ?? ''))) {
        labels.add(rule.label);
      }
    }
    return labels;
  }

  // ── Title-based labels ────────────────────────────────────────────
  // Parse a conventional-commit style title: type(scope1,scope2)!: subject
  function matchTitleLabels(title) {
    const labels = new Set();
    const m = (title ?? '').match(/^(\w+)(?:\(([^)]+)\))?(!)?:/);
    if (!m) return { labels, type: null, typeLabel: null, scopes: [], breaking: false };
    const type = m[1].toLowerCase();
    const scopeStr = m[2] ?? '';
    const breaking = !!m[3];
    const typeLabel = typeToLabel[type] || null;
    if (typeLabel) labels.add(typeLabel);
    if (breaking) labels.add('breaking');
    const scopes = scopeStr.split(',').map(s => s.trim()).filter(Boolean);
    for (const scope of scopes) {
      const sl = scopeToLabel[scope];
      if (sl) labels.add(sl);
    }
    return { labels, type, typeLabel, scopes, breaking };
  }

  // ── Org membership ────────────────────────────────────────────────
  // Decide whether `author` is external to the langchain-ai org.
  // Bots are treated as internal; only an 'active' membership counts.
  async function checkMembership(author, userType) {
    if (userType === 'Bot') {
      console.log(`${author} is a Bot — treating as internal`);
      return { isExternal: false };
    }
    try {
      const membership = await github.rest.orgs.getMembershipForUser({
        org: 'langchain-ai',
        username: author,
      });
      const isExternal = membership.data.state !== 'active';
      console.log(
        isExternal
          ? `${author} has pending membership — treating as external`
          : `${author} is an active member of langchain-ai`,
      );
      return { isExternal };
    } catch (e) {
      if (e.status === 404) {
        console.log(`${author} is not a member of langchain-ai`);
        return { isExternal: true };
      }
      // Non-404 errors (rate limit, auth failure, server error) must not
      // silently default to external — rethrow to fail the step.
      throw new Error(
        `Membership check failed for ${author} (${e.status}): ${e.message}`,
      );
    }
  }

  // ── Contributor analysis ──────────────────────────────────────────
  // Memoized per-author info: { isExternal, mergedCount }.
  // mergedCount is only looked up for external authors; null when the
  // search failed or the author is internal.
  async function getContributorInfo(contributorCache, author, userType) {
    if (contributorCache.has(author)) return contributorCache.get(author);
    const { isExternal } = await checkMembership(author, userType);
    let mergedCount = null;
    if (isExternal) {
      try {
        const result = await github.rest.search.issuesAndPullRequests({
          q: `repo:${owner}/${repo} is:pr is:merged author:"${author}"`,
          per_page: 1,
        });
        mergedCount = result?.data?.total_count ?? null;
      } catch (e) {
        if (e?.status !== 422) throw e;
        core.warning(`Search failed for ${author}; skipping tier.`);
      }
    }
    const info = { isExternal, mergedCount };
    contributorCache.set(author, info);
    return info;
  }

  // ── Tier label resolution ───────────────────────────────────────────
  // Apply 'trusted-contributor' (>= trustedThreshold merged PRs) or
  // 'new-contributor' (0 merged PRs, unless skipped). Returns the applied
  // label name, or undefined when none was applied.
  async function applyTierLabel(issueNumber, author, { skipNewContributor = false } = {}) {
    let mergedCount;
    try {
      const result = await github.rest.search.issuesAndPullRequests({
        q: `repo:${owner}/${repo} is:pr is:merged author:"${author}"`,
        per_page: 1,
      });
      mergedCount = result?.data?.total_count;
    } catch (error) {
      if (error?.status !== 422) throw error;
      core.warning(`Search failed for ${author}; skipping tier label.`);
      return;
    }
    if (mergedCount == null) {
      core.warning(`Search response missing total_count for ${author}; skipping tier label.`);
      return;
    }
    let tierLabel = null;
    if (mergedCount >= trustedThreshold) tierLabel = 'trusted-contributor';
    else if (mergedCount === 0 && !skipNewContributor) tierLabel = 'new-contributor';
    if (tierLabel) {
      await ensureLabel(tierLabel);
      await github.rest.issues.addLabels({
        owner, repo, issue_number: issueNumber, labels: [tierLabel],
      });
      console.log(`Applied '${tierLabel}' to #${issueNumber} (${mergedCount} merged PRs)`);
    } else {
      console.log(`No tier label for ${author} (${mergedCount} merged PRs)`);
    }
    return tierLabel;
  }

  return {
    ensureLabel,
    getSizeLabel,
    computeSize,
    buildFileRules,
    matchFileLabels,
    matchTitleLabels,
    allTypeLabels,
    checkMembership,
    getContributorInfo,
    applyTierLabel,
    sizeLabels,
    tierLabels,
    trustedThreshold,
    labelColor,
  };
}
// Convenience entry point: read the config, then build the helper bundle.
function loadAndInit(github, owner, repo, core) {
  const config = loadConfig();
  const h = init(github, owner, repo, config, core);
  return { config, h };
}

module.exports = { loadConfig, init, loadAndInit };
================================================
FILE: .github/tools/git-restore-mtime
================================================
#!/usr/bin/env python3
#
# git-restore-mtime - Change mtime of files based on commit date of last change
#
# Copyright (C) 2012 Rodrigo Silva (MestreLion) <linux@rodrigosilva.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. See <http://www.gnu.org/licenses/gpl.html>
#
# Source: https://github.com/MestreLion/git-tools
# Version: July 13, 2023 (commit hash 5f832e72453e035fccae9d63a5056918d64476a2)
"""
Change the modification time (mtime) of files in work tree, based on the
date of the most recent commit that modified the file, including renames.
Ignores untracked files and uncommitted deletions, additions and renames, and
by default modifications too.
---
Useful prior to generating release tarballs, so each file is archived with a
date that is similar to the date when the file was actually last modified,
assuming the actual modification date and its commit date are close.
"""
# TODO:
# - Add -z on git whatchanged/ls-files, so we don't deal with filename decoding
# - When Python is bumped to 3.7, use text instead of universal_newlines on subprocess
# - Update "Statistics for some large projects" with modern hardware and repositories.
# - Create a README.md for git-restore-mtime alone. It deserves extensive documentation
# - Move Statistics there
# - See git-extras as a good example on project structure and documentation
# FIXME:
# - When current dir is outside the worktree, e.g. using --work-tree, `git ls-files`
# assume any relative pathspecs are to worktree root, not the current dir. As such,
# relative pathspecs may not work.
# - Renames are tricky:
# - R100 should not change mtime, but original name is not on filelist. Should
# track renames until a valid (A, M) mtime found and then set on current name.
# - Should set mtime for both current and original directories.
# - Check mode changes with unchanged blobs?
# - Check file (A, D) for the directory mtime is not sufficient:
# - Renames also change dir mtime, unless rename was on a parent dir
# - If most recent change of all files in a dir was a Modification (M),
# dir might not be touched at all.
# - Dirs containing only subdirectories but no direct files will also
# not be touched. They're files' [grand]parent dir, but never their dirname().
# - Some solutions:
# - After files done, perform some dir processing for missing dirs, finding latest
# file (A, D, R)
# - Simple approach: dir mtime is the most recent child (dir or file) mtime
# - Use a virtual concept of "created at most at" to fill missing info, bubble up
# to parents and grandparents
# - When handling [grand]parent dirs, stay inside <pathspec>
# - Better handling of merge commits. `-m` is plain *wrong*. `-c/--cc` is perfect, but
# painfully slow. First pass without merge commits is not accurate. Maybe add a new
# `--accurate` mode for `--cc`?
# This file is strictly a CLI tool: importing it would run argparse and other
# module-level side effects, so refuse to be imported at all.
if __name__ != "__main__":
    raise ImportError("{} should not be used as a module.".format(__name__))
import argparse
import datetime
import logging
import os.path
import shlex
import signal
import subprocess
import sys
import time
# Base version; the "+dev" suffix triggers a git-describe lookup in get_version()
__version__ = "2022.12+dev"
# Update symlinks only if the platform supports not following them
UPDATE_SYMLINKS = bool(os.utime in getattr(os, "supports_follow_symlinks", []))
# Call os.path.normpath() only if not in a POSIX platform (Windows)
NORMALIZE_PATHS = os.path.sep != "/"
# How many files to process in each batch when re-trying merge commits
STEPMISSING = 100
# (Extra) keywords for the os.utime() call performed by touch()
UTIME_KWS = {} if not UPDATE_SYMLINKS else {"follow_symlinks": False}
# Command-line interface ######################################################
def parse_args():
    """Build the CLI parser, parse sys.argv, and derive logging attributes.

    Returns the parsed namespace with two derived attributes:
    - loglevel: lowered towards TRACE when -v/-vv is given
      (logging.TRACE is installed by setup_logging()).
    - debug: True when loglevel is at DEBUG or below.
    """
    # Description is the module docstring up to the "---" separator.
    parser = argparse.ArgumentParser(description=__doc__.split("\n---")[0])

    # --quiet and --verbose are mutually exclusive.
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "--quiet",
        "-q",
        dest="loglevel",
        action="store_const",
        const=logging.WARNING,
        default=logging.INFO,
        help="Suppress informative messages and summary statistics.",
    )
    group.add_argument(
        "--verbose",
        "-v",
        action="count",
        help="""
        Print additional information for each processed file.
        Specify twice to further increase verbosity.
        """,
    )
    parser.add_argument(
        "--cwd",
        "-C",
        metavar="DIRECTORY",
        help="""
        Run as if %(prog)s was started in directory %(metavar)s.
        This affects how --work-tree, --git-dir and PATHSPEC arguments are handled.
        See 'man 1 git' or 'git --help' for more information.
        """,
    )
    parser.add_argument(
        "--git-dir",
        dest="gitdir",
        metavar="GITDIR",
        help="""
        Path to the git repository, by default auto-discovered by searching
        the current directory and its parents for a .git/ subdirectory.
        """,
    )
    parser.add_argument(
        "--work-tree",
        dest="workdir",
        metavar="WORKTREE",
        help="""
        Path to the work tree root, by default the parent of GITDIR if it's
        automatically discovered, or the current directory if GITDIR is set.
        """,
    )
    parser.add_argument(
        "--force",
        "-f",
        default=False,
        action="store_true",
        help="""
        Force updating files with uncommitted modifications.
        Untracked files and uncommitted deletions, renames and additions are
        always ignored.
        """,
    )
    parser.add_argument(
        "--merge",
        "-m",
        default=False,
        action="store_true",
        help="""
        Include merge commits.
        Leads to more recent times and more files per commit, thus with the same
        time, which may or may not be what you want.
        Including merge commits may lead to fewer commits being evaluated as files
        are found sooner, which can improve performance, sometimes substantially.
        But as merge commits are usually huge, processing them may also take longer.
        By default, merge commits are only used for files missing from regular commits.
        """,
    )
    parser.add_argument(
        "--first-parent",
        default=False,
        action="store_true",
        help="""
        Consider only the first parent, the "main branch", when evaluating merge commits.
        Only effective when merge commits are processed, either when --merge is
        used or when finding missing files after the first regular log search.
        See --skip-missing.
        """,
    )
    parser.add_argument(
        "--skip-missing",
        "-s",
        dest="missing",
        default=True,
        action="store_false",
        help="""
        Do not try to find missing files.
        If merge commits were not evaluated with --merge and some files were
        not found in regular commits, by default %(prog)s searches for these
        files again in the merge commits.
        This option disables this retry, so files found only in merge commits
        will not have their timestamp updated.
        """,
    )
    parser.add_argument(
        "--no-directories",
        "-D",
        dest="dirs",
        default=True,
        action="store_false",
        help="""
        Do not update directory timestamps.
        By default, use the time of its most recently created, renamed or deleted file.
        Note that just modifying a file will NOT update its directory time.
        """,
    )
    parser.add_argument(
        "--test",
        "-t",
        default=False,
        action="store_true",
        help="Test run: do not actually update any file timestamp.",
    )
    parser.add_argument(
        "--commit-time",
        "-c",
        dest="commit_time",
        default=False,
        action="store_true",
        help="Use commit time instead of author time.",
    )
    parser.add_argument(
        "--oldest-time",
        "-o",
        dest="reverse_order",
        default=False,
        action="store_true",
        help="""
        Update times based on the oldest, instead of the most recent commit of a file.
        This reverses the order in which the git log is processed to emulate a
        file "creation" date. Note this will be inaccurate for files deleted and
        re-created at later dates.
        """,
    )
    parser.add_argument(
        "--skip-older-than",
        metavar="SECONDS",
        type=int,
        help="""
        Ignore files that are currently older than %(metavar)s.
        Useful in workflows that assume such files already have a correct timestamp,
        as it may improve performance by processing fewer files.
        """,
    )
    parser.add_argument(
        "--skip-older-than-commit",
        "-N",
        default=False,
        action="store_true",
        help="""
        Ignore files older than the timestamp it would be updated to.
        Such files may be considered "original", likely in the author's repository.
        """,
    )
    parser.add_argument(
        "--unique-times",
        default=False,
        action="store_true",
        help="""
        Set the microseconds to a unique value per commit.
        Allows telling apart changes that would otherwise have identical timestamps,
        as git's time accuracy is in seconds.
        """,
    )
    parser.add_argument(
        "pathspec",
        nargs="*",
        metavar="PATHSPEC",
        help="""
        Only modify paths matching %(metavar)s, relative to current directory.
        By default, update all but untracked files and submodules.
        """,
    )
    parser.add_argument(
        "--version",
        "-V",
        action="version",
        version="%(prog)s version {version}".format(version=get_version()),
    )
    args_ = parser.parse_args()
    # -v lowers the level to DEBUG, -vv to DEBUG//2 == TRACE.
    if args_.verbose:
        args_.loglevel = max(logging.TRACE, logging.DEBUG // args_.verbose)
    args_.debug = args_.loglevel <= logging.DEBUG
    return args_
def get_version(version=__version__):
    """Resolve the version string, asking git-describe for "+dev" builds."""
    # Released versions are used verbatim; only dev builds query git.
    if not version.endswith("+dev"):
        return version
    script_dir = os.path.dirname(os.path.realpath(__file__))
    try:
        described = Git(cwd=script_dir, errors=False).describe()
    except Git.Error:
        # Not a git checkout (or describe failed): mark the build as unknown.
        return version + "-unknown"
    return described.lstrip("v")
# Helper functions ############################################################
def setup_logging():
    """Add TRACE logging level and corresponding method, return the root logger"""
    trace_level = logging.DEBUG // 2
    logging.TRACE = trace_level

    def _trace(self, msg, *args, **kwargs):
        # Forward to the generic log() call at the custom TRACE level.
        self.log(trace_level, msg, *args, **kwargs)

    logging.Logger.trace = _trace
    return logging.getLogger()
def normalize(path):
    r"""Normalize paths from git, handling non-ASCII characters.

    Git stores paths as UTF-8 normalization form C.
    If path contains non-ASCII or non-printable characters, git outputs the UTF-8
    in octal-escaped notation, escaping double-quotes and backslashes, and then
    double-quoting the whole path.
    https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath

    This function reverts this encoding, so:
    normalize(r'"Back\\slash_double\"quote_a\303\247a\303\255"') =>
        r'Back\slash_double"quote_açaí')

    Paths with invalid UTF-8 encoding, such as single 0x80-0xFF bytes (e.g, from
    Latin1/Windows-1251 encoding) are decoded using surrogate escape, the same
    method used by Python for filesystem paths. So 0xE6 ("æ" in Latin1, r'\\346'
    from Git) is decoded as "\udce6". See https://peps.python.org/pep-0383/ and
    https://vstinner.github.io/painful-history-python-filesystem-encoding.html

    Also see notes on `windows/non-ascii-paths.txt` about path encodings on
    non-UTF-8 platforms and filesystems.
    """
    # Only quoted paths need decoding; git emits plain ASCII paths bare.
    if path and path[0] == '"':
        # Python 2: path = path[1:-1].decode("string-escape")
        # Python 3: https://stackoverflow.com/a/46650050/624066
        path = (
            path[1:-1]  # Remove enclosing double quotes
            .encode("latin1")  # Convert to bytes, required by 'unicode-escape'
            .decode("unicode-escape")  # Perform the actual octal-escaping decode
            .encode("latin1")  # 1:1 mapping to bytes, UTF-8 encoded
            .decode("utf8", "surrogateescape")
        )  # Decode from UTF-8
    if NORMALIZE_PATHS:
        # Make sure the slash matches the OS; for Windows we need a backslash
        path = os.path.normpath(path)
    return path
def dummy(*_args, **_kwargs):
    """No-op stand-in (accepts and ignores any arguments); used in dry-run tests."""
    return None
def touch(path, mtime):
    """The actual mtime update"""
    # Apply the same value to atime and mtime in a single os.utime() call;
    # UTIME_KWS carries follow_symlinks=False where the platform supports it.
    timestamps = (mtime, mtime)
    os.utime(path, timestamps, **UTIME_KWS)
def touch_ns(path, mtime_ns):
    """The actual mtime update, using nanoseconds for unique timestamps"""
    # With ns= given, the positional times argument must be None.
    ns_pair = (mtime_ns, mtime_ns)
    os.utime(path, None, ns=ns_pair, **UTIME_KWS)
def isodate(secs: int):
# time.localtime() accepts floats, but discards fractional part
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(secs))
def isodate_ns(ns: int):
# for integers fromtimestamp() is equivalent and ~16% slower than isodate()
return datetime.datetime.fromtimestamp(ns / 1000000000).isoformat(sep=" ")
def get_mtime_ns(secs: int, idx: int):
    """Build a nanosecond timestamp embedding idx (mod 10**6) as microseconds.

    Gives each commit a distinct sub-second component so otherwise-identical
    whole-second timestamps can be told apart.
    """
    # Time resolution for filesystems and functions:
    # ext-4 and other POSIX filesystems: 1 nanosecond
    # NTFS (Windows default): 100 nanoseconds
    # datetime.datetime() (due to 64-bit float epoch): 1 microsecond
    microseconds = idx % 10**6
    return (secs * 10**6 + microseconds) * 1000
def get_mtime_path(path):
    """Return the current filesystem mtime of path, in float seconds."""
    return os.stat(path).st_mtime
# Git class and parse_log(), the heart of the script ##########################
class Git:
    """Thin wrapper around the git CLI, bound to a work tree / git dir."""

    def __init__(self, workdir=None, gitdir=None, cwd=None, errors=True):
        # errors=False silences git's stderr (used by get_version()).
        self.gitcmd = ["git"]
        self.errors = errors
        # Last streaming subprocess, kept so terminate() can stop it early.
        self._proc = None
        if workdir:
            self.gitcmd.extend(("--work-tree", workdir))
        if gitdir:
            self.gitcmd.extend(("--git-dir", gitdir))
        if cwd:
            self.gitcmd.extend(("-C", cwd))
        self.workdir, self.gitdir = self._get_repo_dirs()

    def ls_files(self, paths: list = None):
        """Yield tracked paths (relative to worktree root), normalized."""
        return (normalize(_) for _ in self._run("ls-files --full-name", paths))

    def ls_dirty(self, force=False):
        """Yield dirty (non-untracked) paths from `git status --porcelain`.

        With force=True, keep only staged renames/additions and worktree
        deletions; for renames, yield the new name (after " -> ").
        """
        return (
            normalize(_[3:].split(" -> ", 1)[-1])
            for _ in self._run("status --porcelain")
            if _[:2] != "??" and (not force or (_[0] in ("R", "A") or _[1] == "D"))
        )

    def log(
        self,
        merge=False,
        first_parent=False,
        commit_time=False,
        reverse_order=False,
        paths: list = None,
    ):
        """Stream `git whatchanged` output: timestamp lines plus file lines."""
        # %ct = committer time, %at = author time.
        cmd = "whatchanged --pretty={}".format("%ct" if commit_time else "%at")
        if merge:
            cmd += " -m"
        if first_parent:
            cmd += " --first-parent"
        if reverse_order:
            cmd += " --reverse"
        return self._run(cmd, paths)

    def describe(self):
        """Return the first line of `git describe --tags` (raises Git.Error)."""
        return self._run("describe --tags", check=True)[0]

    def terminate(self):
        """Stop the current streaming git process, if any."""
        if self._proc is None:
            return
        try:
            self._proc.terminate()
        except OSError:
            # Avoid errors on OpenBSD
            pass

    def _get_repo_dirs(self):
        # Resolve (worktree root, absolute .git dir) in a single git call.
        return (
            os.path.normpath(_)
            for _ in self._run(
                "rev-parse --show-toplevel --absolute-git-dir", check=True
            )
        )

    def _run(self, cmdstr: str, paths: list = None, output=True, check=False):
        """Run a git subcommand.

        Returns the exit code (output=False), a list of output lines
        (check=True, raising Git.Error on failure), or a lazy generator
        over stdout lines (default streaming mode).
        """
        cmdlist = self.gitcmd + shlex.split(cmdstr)
        if paths:
            cmdlist.append("--")
            cmdlist.extend(paths)
        popen_args = dict(universal_newlines=True, encoding="utf8")
        if not self.errors:
            popen_args["stderr"] = subprocess.DEVNULL
        log.trace("Executing: %s", " ".join(cmdlist))
        if not output:
            return subprocess.call(cmdlist, **popen_args)
        if check:
            try:
                stdout: str = subprocess.check_output(cmdlist, **popen_args)
                return stdout.splitlines()
            except subprocess.CalledProcessError as e:
                raise self.Error(e.returncode, e.cmd, e.output, e.stderr)
        self._proc = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, **popen_args)
        return (_.rstrip() for _ in self._proc.stdout)

    def __del__(self):
        # Best-effort cleanup of any still-running streaming process.
        self.terminate()

    class Error(subprocess.CalledProcessError):
        """Error from git executable"""
def parse_log(filelist, dirlist, stats, git, merge=False, filterlist=None):
    """Stream the git log and update mtimes for files still in filelist.

    Mutates filelist, dirlist and stats in place, and terminates the git
    process as soon as every file has been handled. Reads the module-level
    `args` namespace and `log` logger.
    """
    mtime = 0
    datestr = isodate(0)
    for line in git.log(
        merge, args.first_parent, args.commit_time, args.reverse_order, filterlist
    ):
        stats["loglines"] += 1

        # Blank line between Date and list of files
        if not line:
            continue

        # Date line
        if line[0] != ":":  # Faster than `not line.startswith(':')`
            stats["commits"] += 1
            mtime = int(line)
            if args.unique_times:
                mtime = get_mtime_ns(mtime, stats["commits"])
            if args.debug:
                datestr = isodate(mtime)
            continue

        # File line: three tokens if it describes a renaming, otherwise two
        tokens = line.split("\t")

        # Possible statuses:
        # M: Modified (content changed)
        # A: Added (created)
        # D: Deleted
        # T: Type changed: to/from regular file, symlinks, submodules
        # R099: Renamed (moved), with % of unchanged content. 100 = pure rename
        # Not possible in log: C=Copied, U=Unmerged, X=Unknown, B=pairing Broken
        status = tokens[0].split(" ")[-1]
        file = tokens[-1]

        # Handles non-ASCII chars and OS path separator
        file = normalize(file)

        def do_file():
            # Touch `file`; closes over the current iteration's file/mtime/datestr.
            if args.skip_older_than_commit and get_mtime_path(file) <= mtime:
                stats["skip"] += 1
                return
            if args.debug:
                log.debug(
                    "%d\t%d\t%d\t%s\t%s",
                    stats["loglines"],
                    stats["commits"],
                    stats["files"],
                    datestr,
                    file,
                )
            try:
                touch(os.path.join(git.workdir, file), mtime)
                stats["touches"] += 1
            except Exception as e:
                log.error("ERROR: %s: %s", e, file)
                stats["errors"] += 1

        def do_dir():
            # Touch `dirname` of a file that was just added or deleted.
            if args.debug:
                log.debug(
                    "%d\t%d\t-\t%s\t%s",
                    stats["loglines"],
                    stats["commits"],
                    datestr,
                    "{}/".format(dirname or "."),
                )
            try:
                touch(os.path.join(git.workdir, dirname), mtime)
                stats["dirtouches"] += 1
            except Exception as e:
                log.error("ERROR: %s: %s", e, dirname)
                stats["direrrors"] += 1

        # Each file is handled at most once: first log hit wins.
        if file in filelist:
            stats["files"] -= 1
            filelist.remove(file)
            do_file()

        if args.dirs and status in ("A", "D"):
            dirname = os.path.dirname(file)
            if dirname in dirlist:
                dirlist.remove(dirname)
                do_dir()

        # All files done?
        if not stats["files"]:
            git.terminate()
            return
# Main Logic ##################################################################
def main():
start = time.time() # yes, Wall time. CPU time is not realistic for users.
stats = {
_: 0
for _ in (
"loglines",
"commits",
"touches",
"skip",
"errors",
"dirtouches",
"direrrors",
)
}
logging.basicConfig(level=args.loglevel, format="%(message)s")
log.trace("Arguments: %s", args)
# First things first: Where and Who are we?
if args.cwd:
log.debug("Changing directory: %s", args.cwd)
try:
os.chdir(args.cwd)
except OSError as e:
log.critical(e)
return e.errno
# Using both os.chdir() and `git -C` is redundant, but might prevent side effects
# `git -C` alone could be enough if we make sure that:
# - all paths, including args.pathspec, are processed by git: ls-files, rev-parse
# - touch() / os.utime() path argument is always prepended with git.workdir
try:
git = Git(workdir=args.workdir, gitdir=args.gitdir, cwd=args.cwd)
except Git.Error as e:
# Not in a git repository, and git already informed user on stderr. So we just...
return e.returncode
# Get the files managed by git and build file list to be processed
if UPDATE_SYMLINKS and not args.skip_older_than:
filelist = set(git.ls_files(args.pathspec))
else:
filelist = set()
for path in git.ls_files(args.pathspec):
fullpath = os.path.join(git.workdir, path)
# Symlink (to file, to dir or broken - git handles the same way)
if not UPDATE_SYMLINKS and os.path.islink(fullpath):
log.warning(
"WARNING: Skipping symlink, no OS support for updates: %s", path
)
continue
# skip files which are older than given threshold
if (
args.skip_older_than
and start - get_mtime_path(fullpath) > args.skip_older_than
):
continue
# Always add files relative to worktree root
filelist.add(path)
# If --force, silently ignore uncommitted deletions (not in the filesystem)
# and renames / additions (will not be found in log anyway)
if args.force:
filelist -= set(git.ls_dirty(force=True))
# Otherwise, ignore any dirty files
else:
dirty = set(git.ls_dirty())
if dirty:
log.warning(
"WARNING: Modified files in the working directory were ignored."
"\nTo include such files, commit your changes or use --force."
)
filelist -= dirty
# Build dir list to be processed
dirlist = set(os.path.dirname(_) for _ in filelist) if args.dirs else set()
stats["totalfiles"] = stats["files"] = len(filelist)
log.info("{0:,} files to be processed in work dir".format(stats["totalfiles"]))
if not filelist:
# Nothing to do. Exit silently and without errors, just like git does
return
# Process the log until all files are 'touched'
log.debug("Line #\tLog #\tF.Left\tModification Time\tFile Name")
parse_log(filelist, dirlist, stats, git, args.merge, args.pathspec)
# Missing files
if filelist:
# Try to find them in merge logs, if not done already
# (usually HUGE, thus MUCH slower!)
if args.missing and not args.merge:
filterlist = list(filelist)
missing = len(filterlist)
log.info(
"{0:,} files not found in log, trying merge commits".format(missing)
)
for i in range(0, missing, STEPMISSING):
parse_log(
filelist,
dirlist,
stats,
git,
merge=True,
filterlist=filterlist[i : i + STEPMISSING],
)
# Still missing some?
for file in filelist:
log.warning("WARNING: not found in the log: %s", file)
# Final statistics
# Suggestion: use git-log --before=mtime to brag about skipped log entries
def log_info(msg, *a, width=13):
    """Log *msg* via log.info() after pre-rendering numeric placeholders.

    Every ``%d`` in *msg* becomes a thousands-separated integer field and
    every ``%f`` a thousands-separated float field with two decimals, both
    right-aligned to *width* columns.  %-formatting has no thousands
    separator, so the message is pre-rendered with str.format() instead.
    """
    int_spec = "{{:{0},}}".format(width)       # e.g. "{:13,}"
    float_spec = "{{:{0},.2f}}".format(width)  # e.g. "{:13,.2f}"
    rendered = msg.replace("%d", int_spec).replace("%f", float_spec)
    log.info(rendered.format(*a))
log_info(
"Statistics:\n%f seconds\n%d log lines processed\n%d commits evaluated",
time.time() - start,
stats["loglines"],
stats["commits"],
)
if args.dirs:
if stats["direrrors"]:
log_info("%d directory update errors", stats["direrrors"])
log_info("%d directories updated", stats["dirtouches"])
if stats["touches"] != stats["totalfiles"]:
log_info("%d files", stats["totalfiles"])
if stats["skip"]:
log_info("%d files skipped", stats["skip"])
if stats["files"]:
log_info("%d files missing", stats["files"])
if stats["errors"]:
log_info("%d file update errors", stats["errors"])
log_info("%d files updated", stats["touches"])
if args.test:
log.info("TEST RUN - No files modified!")
# Keep only essential, global assignments here. Any other logic must be in main()
log = setup_logging()
args = parse_args()
# Set the actual touch() and other functions based on command-line arguments
if args.unique_times:
    # --unique-times: swap in the *_ns variants of touch()/isodate()
    # (presumably nanosecond-resolution — defined earlier in this file)
    touch = touch_ns
    isodate = isodate_ns
# Make sure this is always set last to ensure --test behaves as intended
if args.test:
    # Dry run: touch() becomes a no-op so no file timestamps are modified,
    # regardless of whether --unique-times selected touch_ns above
    touch = dummy
# UI done, it's showtime!
try:
    sys.exit(main())
except KeyboardInterrupt:
    # On Ctrl-C: log a notice, restore the default SIGINT handler, then
    # re-raise the signal at ourselves so the process terminates with the
    # conventional "killed by SIGINT" status instead of a plain exit code
    log.info("\nAborting")
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGINT)
================================================
FILE: .github/workflows/_compile_integration_test.yml
================================================
# Validates that a package's integration tests compile without syntax or import errors.
#
# (If an integration test fails to compile, it won't run.)
#
# Called as part of check_diffs.yml workflow
#
# Runs pytest with compile marker to check syntax/imports.
name: "🔗 Compile Integration Tests"
on:
workflow_call:
inputs:
working-directory:
required: true
type: string
description: "From which folder this pipeline executes"
python-version:
required: true
type: string
description: "Python version to use"
permissions:
contents: read
env:
UV_FROZEN: "true"
jobs:
build:
defaults:
run:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
timeout-minutes: 20
name: "Python ${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v6
- name: "🐍 Set up Python ${{ inputs.python-version }} + UV"
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
cache-suffix: compile-integration-tests-${{ inputs.working-directory }}
working-directory: ${{ inputs.working-directory }}
- name: "📦 Install Integration Dependencies"
shell: bash
run: uv sync --group test --group test_integration
- name: "🔗 Check Integration Tests Compile"
shell: bash
run: uv run pytest -m compile tests/integration_tests
- name: "🧹 Verify Clean Working Directory"
shell: bash
run: |
set -eu
STATUS="$(git status)"
echo "$STATUS"
# grep will exit non-zero if the target message isn't found,
# and `set -e` above will cause the step to fail.
echo "$STATUS" | grep 'nothing to commit, working tree clean'
================================================
FILE: .github/workflows/_lint.yml
================================================
# Runs linting.
#
# Uses the package's Makefile to run the checks, specifically the
# `lint_package` and `lint_tests` targets.
#
# Called as part of check_diffs.yml workflow.
name: "🧹 Linting"
on:
workflow_call:
inputs:
working-directory:
required: true
type: string
description: "From which folder this pipeline executes"
python-version:
required: true
type: string
description: "Python version to use"
permissions:
contents: read
env:
WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }}
# This env var allows us to get inline annotations when ruff has complaints.
RUFF_OUTPUT_FORMAT: github
UV_FROZEN: "true"
jobs:
# Linting job - runs quality checks on package and test code
build:
name: "Python ${{ inputs.python-version }}"
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: "📋 Checkout Code"
uses: actions/checkout@v6
- name: "🐍 Set up Python ${{ inputs.python-version }} + UV"
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
cache-suffix: lint-${{ inputs.working-directory }}
working-directory: ${{ inputs.working-directory }}
# - name: "🔒 Verify Lockfile is Up-to-Date"
# working-directory: ${{ inputs.working-directory }}
# run: |
# unset UV_FROZEN
# uv lock --check
- name: "📦 Install Lint & Typing Dependencies"
working-directory: ${{ inputs.working-directory }}
run: |
uv sync --group lint --group typing
- name: "🔍 Analyze Package Code with Linters"
working-directory: ${{ inputs.working-directory }}
run: |
make lint_package
- name: "📦 Install Test Dependencies (non-partners)"
# (For directories NOT starting with libs/partners/)
if: ${{ ! startsWith(inputs.working-directory, 'libs/partners/') }}
working-directory: ${{ inputs.working-directory }}
run: |
uv sync --inexact --group test
- name: "📦 Install Test Dependencies"
if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }}
working-directory: ${{ inputs.working-directory }}
run: |
uv sync --inexact --group test --group test_integration
- name: "🔍 Analyze Test Code with Linters"
working-directory: ${{ inputs.working-directory }}
run: |
make lint_tests
================================================
FILE: .github/workflows/_refresh_model_profiles.yml
================================================
# Reusable workflow: refreshes model profile data for any repo that uses the
# `langchain-profiles` CLI. Creates (or updates) a pull request with the
# resulting changes.
#
# Callers MUST set `permissions: { contents: write, pull-requests: write }` —
# reusable workflows cannot escalate the caller's token permissions.
#
# ── Example: external repo (langchain-google) ──────────────────────────
#
# jobs:
# refresh-profiles:
# uses: langchain-ai/langchain/.github/workflows/_refresh_model_profiles.yml@master
# with:
# providers: >-
# [
# {"provider":"google", "data_dir":"libs/genai/langchain_google_genai/data"},
# ]
# secrets:
# MODEL_PROFILE_BOT_APP_ID: ${{ secrets.MODEL_PROFILE_BOT_APP_ID }}
# MODEL_PROFILE_BOT_PRIVATE_KEY: ${{ secrets.MODEL_PROFILE_BOT_PRIVATE_KEY }}
name: "Refresh Model Profiles (reusable)"
on:
workflow_call:
inputs:
providers:
description: >-
JSON array of objects, each with `provider` (models.dev provider ID)
and `data_dir` (path relative to repo root where `_profiles.py` and
`profile_augmentations.toml` live).
required: true
type: string
cli-path:
description: >-
Path (relative to workspace) to an existing `libs/model-profiles`
checkout. When set the workflow skips cloning the langchain repo and
uses this directory for the CLI instead. Useful when the caller IS
the langchain monorepo.
required: false
type: string
default: ""
cli-ref:
description: >-
Git ref of langchain-ai/langchain to checkout for the CLI.
Ignored when `cli-path` is set.
required: false
type: string
default: master
add-paths:
description: "Glob for files to stage in the PR commit."
required: false
type: string
default: "**/_profiles.py"
pr-branch:
description: "Branch name for the auto-created PR."
required: false
type: string
default: bot/refresh-model-profiles
pr-title:
description: "PR / commit title."
required: false
type: string
default: "chore(model-profiles): refresh model profile data"
pr-body:
description: "PR body."
required: false
type: string
default: |
Automated refresh of model profile data via `langchain-profiles refresh`.
🤖 Generated by the `refresh_model_profiles` workflow.
pr-labels:
description: "Comma-separated labels to apply to the PR."
required: false
type: string
default: bot
secrets:
MODEL_PROFILE_BOT_APP_ID:
required: true
MODEL_PROFILE_BOT_PRIVATE_KEY:
required: true
permissions:
contents: write
pull-requests: write
jobs:
refresh-profiles:
name: refresh model profiles
runs-on: ubuntu-latest
steps:
- name: "📋 Checkout"
uses: actions/checkout@v6
- name: "📋 Checkout langchain-profiles CLI"
if: inputs.cli-path == ''
uses: actions/checkout@v6
with:
repository: langchain-ai/langchain
ref: ${{ inputs.cli-ref }}
sparse-checkout: libs/model-profiles
path: _langchain-cli
- name: "🔧 Resolve CLI directory"
id: cli
env:
CLI_PATH: ${{ inputs.cli-path }}
run: |
if [ -n "${CLI_PATH}" ]; then
resolved="${GITHUB_WORKSPACE}/${CLI_PATH}"
if [ ! -d "${resolved}" ]; then
echo "::error::cli-path '${CLI_PATH}' does not exist at ${resolved}"
exit 1
fi
echo "dir=${CLI_PATH}" >> "$GITHUB_OUTPUT"
else
echo "dir=_langchain-cli/libs/model-profiles" >> "$GITHUB_OUTPUT"
fi
- name: "🐍 Set up Python + uv"
uses: astral-sh/setup-uv@0ca8f610542aa7f4acaf39e65cf4eb3c35091883 # v7
with:
version: "0.5.25"
python-version: "3.12"
enable-cache: true
cache-dependency-glob: "**/model-profiles/uv.lock"
- name: "📦 Install langchain-profiles CLI"
working-directory: ${{ steps.cli.outputs.dir }}
run: uv sync --frozen --no-group test --no-group dev --no-group lint
- name: "✅ Validate providers input"
env:
PROVIDERS_JSON: ${{ inputs.providers }}
run: |
echo "${PROVIDERS_JSON}" | jq -e 'type == "array" and length > 0' > /dev/null || {
echo "::error::providers input must be a non-empty JSON array"
exit 1
}
echo "${PROVIDERS_JSON}" | jq -e 'all(has("provider") and has("data_dir"))' > /dev/null || {
echo "::error::every entry in providers must have 'provider' and 'data_dir' keys"
exit 1
}
- name: "🔄 Refresh profiles"
env:
PROVIDERS_JSON: ${{ inputs.providers }}
run: |
cli_dir="${GITHUB_WORKSPACE}/${{ steps.cli.outputs.dir }}"
failed=""
mapfile -t rows < <(echo "${PROVIDERS_JSON}" | jq -c '.[]')
for row in "${rows[@]}"; do
provider=$(echo "${row}" | jq -r '.provider')
data_dir=$(echo "${row}" | jq -r '.data_dir')
echo "--- Refreshing ${provider} -> ${data_dir} ---"
if ! echo y | uv run --frozen --project "${cli_dir}" \
langchain-profiles refresh \
--provider "${provider}" \
--data-dir "${GITHUB_WORKSPACE}/${data_dir}"; then
echo "::error::Failed to refresh provider: ${provider}"
failed="${failed} ${provider}"
fi
done
if [ -n "${failed}" ]; then
echo "::error::The following providers failed:${failed}"
exit 1
fi
- name: "🔑 Generate GitHub App token"
id: app-token
uses: actions/create-github-app-token@v3
with:
app-id: ${{ secrets.MODEL_PROFILE_BOT_APP_ID }}
private-key: ${{ secrets.MODEL_PROFILE_BOT_PRIVATE_KEY }}
- name: "🔀 Create pull request"
id: create-pr
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8
with:
token: ${{ steps.app-token.outputs.token }}
branch: ${{ inputs.pr-branch }}
commit-message: ${{ inputs.pr-title }}
title: ${{ inputs.pr-title }}
body: ${{ inputs.pr-body }}
labels: ${{ inputs.pr-labels }}
add-paths: ${{ inputs.add-paths }}
- name: "📝 Summary"
if: always()
env:
PR_OP: ${{ steps.create-pr.outputs.pull-request-operation }}
PR_URL: ${{ steps.create-pr.outputs.pull-request-url }}
JOB_STATUS: ${{ job.status }}
run: |
if [ "${PR_OP}" = "created" ] || [ "${PR_OP}" = "updated" ]; then
echo "### ✅ PR ${PR_OP}: ${PR_URL}" >> "$GITHUB_STEP_SUMMARY"
elif [ -z "${PR_OP}" ] && [ "${JOB_STATUS}" = "success" ]; then
echo "### ⏭️ Skipped: profiles already up to date" >> "$GITHUB_STEP_SUMMARY"
elif [ "${JOB_STATUS}" = "failure" ]; then
echo "### ❌ Job failed — check step logs for details" >> "$GITHUB_STEP_SUMMARY"
fi
================================================
FILE: .github/workflows/_release.yml
================================================
# Builds and publishes LangChain packages to PyPI.
#
# Manually triggered, though can be used as a reusable workflow (workflow_call).
#
# Handles version bumping, building, and publishing to PyPI with authentication.
name: "🚀 Package Release"
run-name: "Release ${{ inputs.working-directory }} ${{ inputs.release-version }}"
on:
workflow_call:
inputs:
working-directory:
required: true
type: string
description: "From which folder this pipeline executes"
workflow_dispatch:
inputs:
working-directory:
required: true
type: string
description: "From which folder this pipeline executes"
default: "libs/langchain_v1"
release-version:
required: true
type: string
default: "0.1.0"
description: "New version of package being released"
dangerous-nonmaster-release:
required: false
type: boolean
default: false
description: "Release from a non-master branch (danger!) - Only use for hotfixes"
env:
PYTHON_VERSION: "3.11"
UV_FROZEN: "true"
UV_NO_SYNC: "true"
permissions:
contents: read # Job-level overrides grant write only where needed (mark-release)
jobs:
# Build the distribution package and extract version info
# Runs in isolated environment with minimal permissions for security
build:
if: github.ref == 'refs/heads/master' || inputs.dangerous-nonmaster-release
environment: Scheduled testing
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
pkg-name: ${{ steps.check-version.outputs.pkg-name }}
version: ${{ steps.check-version.outputs.version }}
steps:
- uses: actions/checkout@v6
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
# We want to keep this build stage *separate* from the release stage,
# so that there's no sharing of permissions between them.
# (Release stage has trusted publishing and GitHub repo contents write access,
#
# Otherwise, a malicious `build` step (e.g. via a compromised dependency)
# could get access to our GitHub or PyPI credentials.
#
# Per the trusted publishing GitHub Action:
# > It is strongly advised to separate jobs for building [...]
# > from the publish job.
# https://github.com/pypa/gh-action-pypi-publish#non-goals
- name: Build project for distribution
run: uv build
working-directory: ${{ inputs.working-directory }}
- name: Upload build
uses: actions/upload-artifact@v7
with:
name: dist
path: ${{ inputs.working-directory }}/dist/
- name: Check version
id: check-version
shell: python
working-directory: ${{ inputs.working-directory }}
run: |
import os
import tomllib
with open("pyproject.toml", "rb") as f:
data = tomllib.load(f)
pkg_name = data["project"]["name"]
version = data["project"]["version"]
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"pkg-name={pkg_name}\n")
f.write(f"version={version}\n")
release-notes:
# release-notes must run before publishing because its check-tags step
# validates version/tag state — do not remove this dependency.
needs:
- build
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
release-body: ${{ steps.generate-release-body.outputs.release-body }}
steps:
- uses: actions/checkout@v6
with:
repository: langchain-ai/langchain
path: langchain
sparse-checkout: | # this only grabs files for relevant dir
${{ inputs.working-directory }}
ref: ${{ github.ref }} # this scopes to just ref'd branch
fetch-depth: 0 # this fetches entire commit history
- name: Check tags
id: check-tags
shell: bash
working-directory: langchain/${{ inputs.working-directory }}
env:
PKG_NAME: ${{ needs.build.outputs.pkg-name }}
VERSION: ${{ needs.build.outputs.version }}
run: |
# Handle regular versions and pre-release versions differently
if [[ "$VERSION" == *"-"* ]]; then
# This is a pre-release version (contains a hyphen)
# Extract the base version without the pre-release suffix
BASE_VERSION=${VERSION%%-*}
# Look for the latest release of the same base version
REGEX="^$PKG_NAME==$BASE_VERSION\$"
PREV_TAG=$(git tag --sort=-creatordate | (grep -P "$REGEX" || true) | head -1)
# If no exact base version match, look for the latest release of any kind
if [ -z "$PREV_TAG" ]; then
REGEX="^$PKG_NAME==\\d+\\.\\d+\\.\\d+\$"
PREV_TAG=$(git tag --sort=-creatordate | (grep -P "$REGEX" || true) | head -1)
fi
else
# Regular version handling
PREV_TAG="$PKG_NAME==${VERSION%.*}.$(( ${VERSION##*.} - 1 ))"; [[ "${VERSION##*.}" -eq 0 ]] && PREV_TAG=""
# backup case if releasing e.g. 0.3.0, looks up last release
# note if last release (chronologically) was e.g. 0.1.47 it will get
# that instead of the last 0.2 release
if [ -z "$PREV_TAG" ]; then
REGEX="^$PKG_NAME==\\d+\\.\\d+\\.\\d+\$"
echo $REGEX
PREV_TAG=$(git tag --sort=-creatordate | (grep -P $REGEX || true) | head -1)
fi
fi
# if PREV_TAG is empty or came out to 0.0.0, let it be empty
if [ -z "$PREV_TAG" ] || [ "$PREV_TAG" = "$PKG_NAME==0.0.0" ]; then
echo "No previous tag found - first release"
else
# confirm prev-tag actually exists in git repo with git tag
GIT_TAG_RESULT=$(git tag -l "$PREV_TAG")
if [ -z "$GIT_TAG_RESULT" ]; then
echo "Previous tag $PREV_TAG not found in git repo"
exit 1
fi
fi
TAG="${PKG_NAME}==${VERSION}"
if [ "$TAG" == "$PREV_TAG" ]; then
echo "No new version to release"
exit 1
fi
echo tag="$TAG" >> $GITHUB_OUTPUT
echo prev-tag="$PREV_TAG" >> $GITHUB_OUTPUT
- name: Generate release body
id: generate-release-body
working-directory: langchain
env:
WORKING_DIR: ${{ inputs.working-directory }}
PKG_NAME: ${{ needs.build.outputs.pkg-name }}
TAG: ${{ steps.check-tags.outputs.tag }}
PREV_TAG: ${{ steps.check-tags.outputs.prev-tag }}
run: |
PREAMBLE="Changes since $PREV_TAG"
# if PREV_TAG is empty or 0.0.0, then we are releasing the first version
if [ -z "$PREV_TAG" ] || [ "$PREV_TAG" = "$PKG_NAME==0.0.0" ]; then
PREAMBLE="Initial release"
PREV_TAG=$(git rev-list --max-parents=0 HEAD)
fi
{
echo 'release-body<<EOF'
echo $PREAMBLE
echo
git log --format="%s" "$PREV_TAG"..HEAD -- $WORKING_DIR
echo EOF
} >> "$GITHUB_OUTPUT"
test-pypi-publish:
# release-notes must run before publishing because its check-tags step
# validates version/tag state — do not remove this dependency.
needs:
- build
- release-notes
runs-on: ubuntu-latest
permissions:
# This permission is used for trusted publishing:
# https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/
#
# Trusted publishing has to also be configured on PyPI for each package:
# https://docs.pypi.org/trusted-publishers/adding-a-publisher/
id-token: write
steps:
- uses: actions/checkout@v6
- uses: actions/download-artifact@v8
with:
name: dist
path: ${{ inputs.working-directory }}/dist/
- name: Publish to test PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1
with:
packages-dir: ${{ inputs.working-directory }}/dist/
verbose: true
print-hash: true
repository-url: https://test.pypi.org/legacy/
# We overwrite any existing distributions with the same name and version.
# This is *only for CI use* and is *extremely dangerous* otherwise!
# https://github.com/pypa/gh-action-pypi-publish#tolerating-release-package-file-duplicates
skip-existing: true
# Temp workaround since attestations are on by default as of gh-action-pypi-publish v1.11.0
attestations: false
pre-release-checks:
needs:
- build
- release-notes
- test-pypi-publish
runs-on: ubuntu-latest
permissions:
contents: read
timeout-minutes: 20
steps:
- uses: actions/checkout@v6
# We explicitly *don't* set up caching here. This ensures our tests are
# maximally sensitive to catching breakage.
#
# For example, here's a way that caching can cause a falsely-passing test:
# - Make the langchain package manifest no longer list a dependency package
# as a requirement. This means it won't be installed by `pip install`,
# and attempting to use it would cause a crash.
# - That dependency used to be required, so it may have been cached.
# When restoring the venv packages from cache, that dependency gets included.
# - Tests pass, because the dependency is present even though it wasn't specified.
# - The package is published, and it breaks on the missing dependency when
# used in the real world.
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
id: setup-python
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v8
with:
name: dist
path: ${{ inputs.working-directory }}/dist/
- name: Import dist package
shell: bash
working-directory: ${{ inputs.working-directory }}
env:
PKG_NAME: ${{ needs.build.outputs.pkg-name }}
VERSION: ${{ needs.build.outputs.version }}
# Here we use:
# - The default regular PyPI index as the *primary* index, meaning
# that it takes priority (https://pypi.org/simple)
# - The test PyPI index as an extra index, so that any dependencies that
# are not found on test PyPI can be resolved and installed anyway.
# (https://test.pypi.org/simple). This will include the PKG_NAME==VERSION
# package because VERSION will not have been uploaded to regular PyPI yet.
# - NOTE(review): test PyPI can lag in availability right after upload; if
#   this install step flakes, add a short-delay retry (none is implemented
#   in the `run:` script below yet).
run: |
uv venv
VIRTUAL_ENV=.venv uv pip install dist/*.whl
# Replace all dashes in the package name with underscores,
# since that's how Python imports packages with dashes in the name.
# also remove _official suffix
IMPORT_NAME="$(echo "$PKG_NAME" | sed s/-/_/g | sed s/_official//g)"
uv run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
- name: Import test dependencies
run: uv sync --group test
working-directory: ${{ inputs.working-directory }}
# Overwrite the local version of the package with the built version
- name: Import published package (again)
working-directory: ${{ inputs.working-directory }}
shell: bash
env:
PKG_NAME: ${{ needs.build.outputs.pkg-name }}
VERSION: ${{ needs.build.outputs.version }}
run: |
VIRTUAL_ENV=.venv uv pip install dist/*.whl
- name: Check for prerelease versions
# Block release if any dependencies allow prerelease versions
# (unless this is itself a prerelease version)
working-directory: ${{ inputs.working-directory }}
run: |
uv run python $GITHUB_WORKSPACE/.github/scripts/check_prerelease_dependencies.py pyproject.toml
- name: Run unit tests
run: make tests
working-directory: ${{ inputs.working-directory }}
- name: Get minimum versions
# Find the minimum published versions that satisfies the given constraints
working-directory: ${{ inputs.working-directory }}
id: min-version
run: |
VIRTUAL_ENV=.venv uv pip install packaging requests
python_version="$(uv run python --version | awk '{print $2}')"
min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
echo "min-versions=$min_versions"
- name: Run unit tests with minimum dependency versions
if: ${{ steps.min-version.outputs.min-versions != '' }}
env:
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
run: |
VIRTUAL_ENV=.venv uv pip install --force-reinstall --editable .
VIRTUAL_ENV=.venv uv pip install --force-reinstall $MIN_VERSIONS
make tests
working-directory: ${{ inputs.working-directory }}
- name: Import integration test dependencies
run: uv sync --group test --group test_integration
working-directory: ${{ inputs.working-directory }}
- name: Run integration tests
# Uses the Makefile's `integration_tests` target for the specified package
if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }}
env:
AI21_API_KEY: ${{ secrets.AI21_API_KEY }}
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
NVIDIA_API_KEY: ${{ secrets.NVIDIA_API_KEY }}
GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }}
GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
NOMIC_API_KEY: ${{ secrets.NOMIC_API_KEY }}
WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }}
WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }}
ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }}
ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }}
ASTRA_DB_KEYSPACE: ${{ secrets.ASTRA_DB_KEYSPACE }}
ES_URL: ${{ secrets.ES_URL }}
ES_CLOUD_ID: ${{ secrets.ES_CLOUD_ID }}
ES_API_KEY: ${{ secrets.ES_API_KEY }}
MONGODB_ATLAS_URI: ${{ secrets.MONGODB_ATLAS_URI }}
UPSTAGE_API_KEY: ${{ secrets.UPSTAGE_API_KEY }}
FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
XAI_API_KEY: ${{ secrets.XAI_API_KEY }}
DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
PPLX_API_KEY: ${{ secrets.PPLX_API_KEY }}
OLLAMA_API_KEY: ${{ secrets.OLLAMA_API_KEY }}
OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }}
run: make integration_tests
working-directory: ${{ inputs.working-directory }}
# Test select published packages against new core
# Done when code changes are made to langchain-core
test-prior-published-packages-against-new-core:
# Installs the new core with old partners: Installs the new unreleased core
# alongside the previously published partner packages and runs integration tests
needs:
- build
- release-notes
- test-pypi-publish
- pre-release-checks
runs-on: ubuntu-latest
permissions:
contents: read
if: false # temporarily skip
strategy:
matrix:
partner: [anthropic]
fail-fast: false # Continue testing other partners if one fails
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
ANTHROPIC_FILES_API_IMAGE_ID: ${{ secrets.ANTHROPIC_FILES_API_IMAGE_ID }}
ANTHROPIC_FILES_API_PDF_ID: ${{ secrets.ANTHROPIC_FILES_API_PDF_ID }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }}
steps:
- uses: actions/checkout@v6
# We implement this conditional as Github Actions does not have good support
# for conditionally needing steps. https://github.com/actions/runner/issues/491
# TODO: this seems to be resolved upstream, so we can probably remove this workaround
- name: Check if libs/core
run: |
if [ "${{ startsWith(inputs.working-directory, 'libs/core') }}" != "true" ]; then
echo "Not in libs/core. Exiting successfully."
exit 0
fi
- name: Set up Python + uv
if: startsWith(inputs.working-directory, 'libs/core')
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v8
if: startsWith(inputs.working-directory, 'libs/core')
with:
name: dist
path: ${{ inputs.working-directory }}/dist/
- name: Test against ${{ matrix.partner }}
if: startsWith(inputs.working-directory, 'libs/core')
run: |
# Identify latest tag, excluding pre-releases
LATEST_PACKAGE_TAG="$(
git ls-remote --tags origin "langchain-${{ matrix.partner }}*" \
| awk '{print $2}' \
| sed 's|refs/tags/||' \
| grep -E '[0-9]+\.[0-9]+\.[0-9]+$' \
| sort -Vr \
| head -n 1
)"
echo "Latest package tag: $LATEST_PACKAGE_TAG"
# Shallow-fetch just that single tag
git fetch --depth=1 origin tag "$LATEST_PACKAGE_TAG"
# Checkout the latest package files
rm -rf $GITHUB_WORKSPACE/libs/partners/${{ matrix.partner }}/*
rm -rf $GITHUB_WORKSPACE/libs/standard-tests/*
cd $GITHUB_WORKSPACE/libs/
git checkout "$LATEST_PACKAGE_TAG" -- standard-tests/
git checkout "$LATEST_PACKAGE_TAG" -- partners/${{ matrix.partner }}/
cd partners/${{ matrix.partner }}
# Print as a sanity check
echo "Version number from pyproject.toml: "
cat pyproject.toml | grep "version = "
# Run tests
uv sync --group test --group test_integration
uv pip install ../../core/dist/*.whl
make integration_tests
# Test external packages that depend on langchain-core/langchain against the new release
# Only runs for core and langchain_v1 releases to catch breaking changes before publish
test-dependents:
name: "🐍 Python ${{ matrix.python-version }}: ${{ matrix.package.path }}"
needs:
- build
- release-notes
- test-pypi-publish
- pre-release-checks
runs-on: ubuntu-latest
permissions:
contents: read
# Only run for core or langchain_v1 releases
if: startsWith(inputs.working-directory, 'libs/core') || startsWith(inputs.working-directory, 'libs/langchain_v1')
strategy:
fail-fast: false
matrix:
python-version: ["3.11", "3.13"]
package:
- name: deepagents
repo: langchain-ai/deepagents
path: libs/deepagents
# No API keys needed for now - deepagents `make test` only runs unit tests
steps:
- uses: actions/checkout@v6
with:
path: langchain
- uses: actions/checkout@v6
with:
repository: ${{ matrix.package.repo }}
path: ${{ matrix.package.name }}
- name: Set up Python + uv
uses: "./langchain/.github/actions/uv_setup"
with:
python-version: ${{ matrix.python-version }}
- uses: actions/download-artifact@v8
with:
name: dist
path: dist/
- name: Install ${{ matrix.package.name }} with local packages
# External dependents don't have [tool.uv.sources] pointing to this repo,
# so we install the package normally then override with the built wheel.
run: |
cd ${{ matrix.package.name }}/${{ matrix.package.path }}
# Install the package with test dependencies
uv sync --group test
# Override with the built wheel from this release
uv pip install $GITHUB_WORKSPACE/dist/*.whl
- name: Run ${{ matrix.package.name }} tests
run: |
cd ${{ matrix.package.name }}/${{ matrix.package.path }}
make test
publish:
# Publishes the package to PyPI
needs:
- build
- release-notes
- test-pypi-publish
- pre-release-checks
- test-dependents
# - test-prior-published-packages-against-new-core
# Run if all needed jobs succeeded or were skipped (test-dependents only runs for core/langchain_v1)
if: ${{ !cancelled() && !failure() }}
runs-on: ubuntu-latest
permissions:
# This permission is used for trusted publishing:
# https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/
#
# Trusted publishing has to also be configured on PyPI for each package:
# https://docs.pypi.org/trusted-publishers/adding-a-publisher/
id-token: write
defaults:
run:
working-directory: ${{ inputs.working-directory }}
steps:
- uses: actions/checkout@v6
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
- uses: actions/download-artifact@v8
with:
name: dist
path: ${{ inputs.working-directory }}/dist/
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1
with:
packages-dir: ${{ inputs.working-directory }}/dist/
verbose: true
print-hash: true
# Temp workaround since attestations are on by default as of gh-action-pypi-publish v1.11.0
attestations: false
  mark-release:
    # Marks the GitHub release with the new version tag
    needs:
      - build
      - release-notes
      - test-pypi-publish
      - pre-release-checks
      - publish
    # Run if all needed jobs succeeded or were skipped (test-dependents only runs for core/langchain_v1)
    if: ${{ !cancelled() && !failure() }}
    runs-on: ubuntu-latest
    permissions:
      # This permission is needed by `ncipollo/release-action` to
      # create the GitHub release/tag
      contents: write
    defaults:
      run:
        working-directory: ${{ inputs.working-directory }}
    steps:
      - uses: actions/checkout@v6
      - name: Set up Python + uv
        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      # Re-download the built distributions from the `build` job so they can be
      # attached to the GitHub release as artifacts.
      - uses: actions/download-artifact@v8
        with:
          name: dist
          path: ${{ inputs.working-directory }}/dist/
      - name: Create Tag
        # Pinned to a commit SHA (not a mutable tag) for supply-chain safety.
        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1
        with:
          artifacts: "dist/*"
          token: ${{ secrets.GITHUB_TOKEN }}
          # Release body is produced by the `release-notes` job instead of
          # GitHub's auto-generated notes.
          generateReleaseNotes: false
          tag: ${{needs.build.outputs.pkg-name}}==${{ needs.build.outputs.version }}
          body: ${{ needs.release-notes.outputs.release-body }}
          commit: ${{ github.sha }}
          # Only langchain-core releases are marked "latest" on GitHub.
          makeLatest: ${{ needs.build.outputs.pkg-name == 'langchain-core'}}
================================================
FILE: .github/workflows/_test.yml
================================================
# Runs unit tests with both current and minimum supported dependency versions
# to ensure compatibility across the supported range.
name: "🧪 Unit Testing"
on:
  workflow_call:
    inputs:
      working-directory:
        required: true
        type: string
        description: "From which folder this pipeline executes"
      python-version:
        required: true
        type: string
        description: "Python version to use"
permissions:
  contents: read
env:
  # Keep uv pinned to the lockfile; dependencies are installed explicitly below.
  UV_FROZEN: "true"
  UV_NO_SYNC: "true"
jobs:
  # Main test job - runs unit tests with current deps, then retests with minimum versions
  build:
    defaults:
      run:
        working-directory: ${{ inputs.working-directory }}
    runs-on: ubuntu-latest
    timeout-minutes: 20
    name: "Python ${{ inputs.python-version }}"
    steps:
      - name: "📋 Checkout Code"
        uses: actions/checkout@v6
      - name: "🐍 Set up Python ${{ inputs.python-version }} + UV"
        uses: "./.github/actions/uv_setup"
        id: setup-python
        with:
          python-version: ${{ inputs.python-version }}
          cache-suffix: test-${{ inputs.working-directory }}
          working-directory: ${{ inputs.working-directory }}
      - name: "📦 Install Test Dependencies"
        shell: bash
        run: uv sync --group test --dev
      - name: "🧪 Run Core Unit Tests"
        shell: bash
        run: |
          make test PYTEST_EXTRA=-q
      - name: "🔍 Calculate Minimum Dependency Versions"
        working-directory: ${{ inputs.working-directory }}
        id: min-version
        shell: bash
        # get_min_versions.py emits pip requirement specifiers pinning each dependency
        # to its minimum supported version; an empty result skips the retest step below.
        run: |
          VIRTUAL_ENV=.venv uv pip install packaging tomli requests
          python_version="$(uv run python --version | awk '{print $2}')"
          min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
          echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
          echo "min-versions=$min_versions"
      - name: "🧪 Run Tests with Minimum Dependencies"
        if: ${{ steps.min-version.outputs.min-versions != '' }}
        env:
          MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
        # NOTE(review): this step invokes `make tests` while the first test step uses
        # `make test` — presumably both targets exist in the package Makefiles; confirm.
        run: |
          VIRTUAL_ENV=.venv uv pip install $MIN_VERSIONS
          make tests PYTEST_EXTRA=-q
        working-directory: ${{ inputs.working-directory }}
      - name: "🧹 Verify Clean Working Directory"
        shell: bash
        run: |
          set -eu
          STATUS="$(git status)"
          echo "$STATUS"
          # grep will exit non-zero if the target message isn't found,
          # and `set -e` above will cause the step to fail.
          echo "$STATUS" | grep 'nothing to commit, working tree clean'
================================================
FILE: .github/workflows/_test_pydantic.yml
================================================
# Facilitate unit testing against different Pydantic versions for a provided package.
name: "🐍 Pydantic Version Testing"
on:
  workflow_call:
    inputs:
      working-directory:
        required: true
        type: string
        description: "From which folder this pipeline executes"
      python-version:
        required: false
        type: string
        description: "Python version to use"
        default: "3.12"
      pydantic-version:
        required: true
        type: string
        description: "Pydantic version to test."
permissions:
  contents: read
env:
  # Keep uv pinned to the lockfile; the Pydantic override is installed explicitly below.
  UV_FROZEN: "true"
  UV_NO_SYNC: "true"
jobs:
  build:
    defaults:
      run:
        working-directory: ${{ inputs.working-directory }}
    runs-on: ubuntu-latest
    timeout-minutes: 20
    name: "Pydantic ~=${{ inputs.pydantic-version }}"
    steps:
      - name: "📋 Checkout Code"
        uses: actions/checkout@v6
      - name: "🐍 Set up Python ${{ inputs.python-version }} + UV"
        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ inputs.python-version }}
          cache-suffix: test-pydantic-${{ inputs.working-directory }}
          working-directory: ${{ inputs.working-directory }}
      - name: "📦 Install Test Dependencies"
        shell: bash
        run: uv sync --group test
      # Override whatever Pydantic version the lockfile resolved with the
      # requested one (compatible-release specifier, e.g. ~=2.7).
      - name: "🔄 Install Specific Pydantic Version"
        shell: bash
        env:
          PYDANTIC_VERSION: ${{ inputs.pydantic-version }}
        run: VIRTUAL_ENV=.venv uv pip install "pydantic~=$PYDANTIC_VERSION"
      - name: "🧪 Run Core Tests"
        shell: bash
        run: |
          make test
      - name: "🧹 Verify Clean Working Directory"
        shell: bash
        run: |
          set -eu
          STATUS="$(git status)"
          echo "$STATUS"
          # grep will exit non-zero if the target message isn't found,
          # and `set -e` above will cause the step to fail.
          echo "$STATUS" | grep 'nothing to commit, working tree clean'
================================================
FILE: .github/workflows/auto-label-by-package.yml
================================================
# Reads the "### Package" section of an issue body (from the issue form) and
# keeps the issue's package labels in sync with the selected package(s).
name: Auto Label Issues by Package
on:
  issues:
    types: [opened, edited]
permissions:
  contents: read
jobs:
  label-by-package:
    permissions:
      issues: write
    runs-on: ubuntu-latest
    steps:
      - name: Sync package labels
        uses: actions/github-script@v8
        with:
          script: |
            const body = context.payload.issue.body || "";
            // Extract text under "### Package" (handles " (Required)" suffix and being last section)
            const match = body.match(/### Package[^\n]*\n([\s\S]*?)(?:\n###|$)/i);
            // No "### Package" section means the issue form wasn't used; do nothing.
            if (!match) return;
            const packageSection = match[1].trim();
            // Mapping table for package names to labels
            const mapping = {
              "langchain": "langchain",
              "langchain-openai": "openai",
              "langchain-anthropic": "anthropic",
              "langchain-classic": "langchain-classic",
              "langchain-core": "core",
              "langchain-model-profiles": "model-profiles",
              "langchain-tests": "standard-tests",
              "langchain-text-splitters": "text-splitters",
              "langchain-chroma": "chroma",
              "langchain-deepseek": "deepseek",
              "langchain-exa": "exa",
              "langchain-fireworks": "fireworks",
              "langchain-groq": "groq",
              "langchain-huggingface": "huggingface",
              "langchain-mistralai": "mistralai",
              "langchain-nomic": "nomic",
              "langchain-ollama": "ollama",
              "langchain-openrouter": "openrouter",
              "langchain-perplexity": "perplexity",
              "langchain-qdrant": "qdrant",
              "langchain-xai": "xai",
            };
            // All possible package labels we manage
            const allPackageLabels = Object.values(mapping);
            const selectedLabels = [];
            // Check if this is checkbox format (multiple selection)
            const checkboxMatches = packageSection.match(/- \[x\]\s+([^\n\r]+)/gi);
            if (checkboxMatches) {
              // Handle checkbox format
              for (const match of checkboxMatches) {
                const packageName = match.replace(/- \[x\]\s+/i, '').trim();
                const label = mapping[packageName];
                if (label && !selectedLabels.includes(label)) {
                  selectedLabels.push(label);
                }
              }
            } else {
              // Handle dropdown format (single selection)
              const label = mapping[packageSection];
              if (label) {
                selectedLabels.push(label);
              }
            }
            // Get current issue labels
            const issue = await github.rest.issues.get({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number
            });
            const currentLabels = issue.data.labels.map(label => label.name);
            // Only package labels are managed here; other labels are never touched.
            const currentPackageLabels = currentLabels.filter(label => allPackageLabels.includes(label));
            // Determine labels to add and remove
            const labelsToAdd = selectedLabels.filter(label => !currentPackageLabels.includes(label));
            const labelsToRemove = currentPackageLabels.filter(label => !selectedLabels.includes(label));
            // Add new labels
            if (labelsToAdd.length > 0) {
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                labels: labelsToAdd
              });
            }
            // Remove old labels (one API call per label; the endpoint removes a single label)
            for (const label of labelsToRemove) {
              await github.rest.issues.removeLabel({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                name: label
              });
            }
================================================
FILE: .github/workflows/check_agents_sync.yml
================================================
# Ensures CLAUDE.md and AGENTS.md stay synchronized.
#
# These files contain the same development guidelines but are named differently
# for compatibility with different AI coding assistants (Claude Code uses CLAUDE.md,
# other tools may use AGENTS.md).
name: "🔄 Check CLAUDE.md / AGENTS.md Sync"
on:
  push:
    branches: [master]
    paths:
      - "CLAUDE.md"
      - "AGENTS.md"
  pull_request:
    paths:
      - "CLAUDE.md"
      - "AGENTS.md"
permissions:
  contents: read
jobs:
  check-sync:
    name: "verify files are identical"
    runs-on: ubuntu-latest
    steps:
      - name: "📋 Checkout Code"
        uses: actions/checkout@v6
      - name: "🔍 Check CLAUDE.md and AGENTS.md are in sync"
        # `diff -q` exits non-zero on any difference; on mismatch we re-run diff
        # with color for the log ("|| true" so the script exits via the explicit
        # `exit 1`, not the diff status).
        run: |
          if ! diff -q CLAUDE.md AGENTS.md > /dev/null 2>&1; then
            echo "❌ CLAUDE.md and AGENTS.md are out of sync!"
            echo ""
            echo "These files must contain identical content."
            echo "Differences:"
            echo ""
            diff --color=always CLAUDE.md AGENTS.md || true
            exit 1
          fi
          echo "✅ CLAUDE.md and AGENTS.md are in sync"
================================================
FILE: .github/workflows/check_core_versions.yml
================================================
# Ensures version numbers in pyproject.toml and version.py stay in sync.
#
# (Prevents releases with mismatched version numbers)
name: "🔍 Check Version Equality"
on:
  pull_request:
    paths:
      - "libs/core/pyproject.toml"
      - "libs/core/langchain_core/version.py"
      # The script below also validates langchain_v1 versions, so changes to
      # these files must trigger the workflow too (previously missing, which
      # meant langchain_v1 version bumps were never checked).
      - "libs/langchain_v1/pyproject.toml"
      - "libs/langchain_v1/langchain/__init__.py"
      - "libs/partners/anthropic/pyproject.toml"
      - "libs/partners/anthropic/langchain_anthropic/_version.py"
permissions:
  contents: read
jobs:
  check_version_equality:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - name: "✅ Verify pyproject.toml & version.py Match"
        # For each package, extract the version string from pyproject.toml and
        # from the in-package version constant, and fail if they differ.
        run: |
          # Check core versions
          CORE_PYPROJECT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' libs/core/pyproject.toml)
          CORE_VERSION_PY_VERSION=$(grep -Po '(?<=^VERSION = ")[^"]*' libs/core/langchain_core/version.py)
          # Compare core versions
          if [ "$CORE_PYPROJECT_VERSION" != "$CORE_VERSION_PY_VERSION" ]; then
            echo "langchain-core versions in pyproject.toml and version.py do not match!"
            echo "pyproject.toml version: $CORE_PYPROJECT_VERSION"
            echo "version.py version: $CORE_VERSION_PY_VERSION"
            exit 1
          else
            echo "Core versions match: $CORE_PYPROJECT_VERSION"
          fi
          # Check langchain_v1 versions
          LANGCHAIN_PYPROJECT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' libs/langchain_v1/pyproject.toml)
          LANGCHAIN_INIT_PY_VERSION=$(grep -Po '(?<=^__version__ = ")[^"]*' libs/langchain_v1/langchain/__init__.py)
          # Compare langchain_v1 versions
          if [ "$LANGCHAIN_PYPROJECT_VERSION" != "$LANGCHAIN_INIT_PY_VERSION" ]; then
            echo "langchain_v1 versions in pyproject.toml and __init__.py do not match!"
            echo "pyproject.toml version: $LANGCHAIN_PYPROJECT_VERSION"
            # Fixed label: this value comes from __init__.py, not version.py
            echo "__init__.py version: $LANGCHAIN_INIT_PY_VERSION"
            exit 1
          else
            echo "Langchain v1 versions match: $LANGCHAIN_PYPROJECT_VERSION"
          fi
          # Check langchain-anthropic versions
          ANTHROPIC_PYPROJECT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' libs/partners/anthropic/pyproject.toml)
          ANTHROPIC_VERSION_PY_VERSION=$(grep -Po '(?<=^__version__ = ")[^"]*' libs/partners/anthropic/langchain_anthropic/_version.py)
          # Compare langchain-anthropic versions
          if [ "$ANTHROPIC_PYPROJECT_VERSION" != "$ANTHROPIC_VERSION_PY_VERSION" ]; then
            echo "langchain-anthropic versions in pyproject.toml and _version.py do not match!"
            echo "pyproject.toml version: $ANTHROPIC_PYPROJECT_VERSION"
            echo "_version.py version: $ANTHROPIC_VERSION_PY_VERSION"
            exit 1
          else
            echo "Langchain-anthropic versions match: $ANTHROPIC_PYPROJECT_VERSION"
          fi
================================================
FILE: .github/workflows/check_diffs.yml
================================================
# Primary CI workflow.
#
# Only runs against packages that have changed files.
#
# Runs:
#   - Linting (_lint.yml)
#   - Unit Tests (_test.yml)
#   - Pydantic compatibility tests (_test_pydantic.yml)
#   - Integration test compilation checks (_compile_integration_test.yml)
#   - Extended test suites that require additional dependencies
#
# Reports status to GitHub checks and PR status.
name: "🔧 CI"
on:
  push:
    branches: [master]
  pull_request:
  merge_group:
# Optimizes CI performance by canceling redundant workflow runs
# If another push to the same PR or branch happens while this workflow is still running,
# cancel the earlier run in favor of the next run.
#
# There's no point in testing an outdated version of the code. GitHub only allows
# a limited number of job runners to be active at the same time, so it's better to
# cancel pointless jobs early so that more useful jobs can run sooner.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
permissions:
  contents: read
env:
  # Keep uv pinned to the lockfile across all jobs in this workflow.
  UV_FROZEN: "true"
  UV_NO_SYNC: "true"
jobs:
  # This job analyzes which files changed and creates a dynamic test matrix
  # to only run tests/lints for the affected packages, improving CI efficiency
  build:
    name: "Detect Changes & Set Matrix"
    runs-on: ubuntu-latest
    # PRs labeled `ci-ignore` skip the whole pipeline (downstream jobs all need this one).
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'ci-ignore') }}
    steps:
      - name: "📋 Checkout Code"
        uses: actions/checkout@v6
      - name: "🐍 Setup Python 3.11"
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"
      - name: "📂 Get Changed Files"
        id: files
        # Third-party action pinned to a commit SHA for supply-chain safety.
        uses: Ana06/get-changed-files@25f79e676e7ea1868813e21465014798211fad8c # v2.3.0
      - name: "🔍 Analyze Changed Files & Generate Build Matrix"
        id: set-matrix
        # check_diff.py prints `key=json-matrix` lines which become the step outputs below.
        run: |
          python -m pip install packaging requests
          python .github/scripts/check_diff.py ${{ steps.files.outputs.all }} >> $GITHUB_OUTPUT
    outputs:
      lint: ${{ steps.set-matrix.outputs.lint }}
      test: ${{ steps.set-matrix.outputs.test }}
      extended-tests: ${{ steps.set-matrix.outputs.extended-tests }}
      compile-integration-tests: ${{ steps.set-matrix.outputs.compile-integration-tests }}
      dependencies: ${{ steps.set-matrix.outputs.dependencies }}
      test-pydantic: ${{ steps.set-matrix.outputs.test-pydantic }}
  # Run linting only on packages that have changed files
  lint:
    needs: [build]
    # '[]' means check_diff.py found no packages needing lint; skip the job entirely.
    if: ${{ needs.build.outputs.lint != '[]' }}
    strategy:
      matrix:
        job-configs: ${{ fromJson(needs.build.outputs.lint) }}
      fail-fast: false
    uses: ./.github/workflows/_lint.yml
    with:
      working-directory: ${{ matrix.job-configs.working-directory }}
      python-version: ${{ matrix.job-configs.python-version }}
    secrets: inherit
  # Run unit tests only on packages that have changed files
  test:
    needs: [build]
    # '[]' means no packages need testing for this change set.
    if: ${{ needs.build.outputs.test != '[]' }}
    strategy:
      matrix:
        job-configs: ${{ fromJson(needs.build.outputs.test) }}
      fail-fast: false
    uses: ./.github/workflows/_test.yml
    with:
      working-directory: ${{ matrix.job-configs.working-directory }}
      python-version: ${{ matrix.job-configs.python-version }}
    secrets: inherit
  # Test compatibility with different Pydantic versions for affected packages
  test-pydantic:
    needs: [build]
    if: ${{ needs.build.outputs.test-pydantic != '[]' }}
    strategy:
      matrix:
        # Each entry carries working-directory + pydantic-version (see check_diff.py).
        job-configs: ${{ fromJson(needs.build.outputs.test-pydantic) }}
      fail-fast: false
    uses: ./.github/workflows/_test_pydantic.yml
    with:
      working-directory: ${{ matrix.job-configs.working-directory }}
      pydantic-version: ${{ matrix.job-configs.pydantic-version }}
    secrets: inherit
  # Verify integration tests compile without actually running them (faster feedback)
  compile-integration-tests:
    name: "Compile Integration Tests"
    needs: [build]
    if: ${{ needs.build.outputs.compile-integration-tests != '[]' }}
    strategy:
      matrix:
        job-configs: ${{ fromJson(needs.build.outputs.compile-integration-tests) }}
      fail-fast: false
    uses: ./.github/workflows/_compile_integration_test.yml
    with:
      working-directory: ${{ matrix.job-configs.working-directory }}
      python-version: ${{ matrix.job-configs.python-version }}
    secrets: inherit
  # Run extended test suites that require additional dependencies
  extended-tests:
    name: "Extended Tests"
    needs: [build]
    if: ${{ needs.build.outputs.extended-tests != '[]' }}
    strategy:
      matrix:
        # note different variable for extended test dirs
        job-configs: ${{ fromJson(needs.build.outputs.extended-tests) }}
      fail-fast: false
    runs-on: ubuntu-latest
    timeout-minutes: 20
    defaults:
      run:
        working-directory: ${{ matrix.job-configs.working-directory }}
    steps:
      - uses: actions/checkout@v6
      - name: "🐍 Set up Python ${{ matrix.job-configs.python-version }} + UV"
        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ matrix.job-configs.python-version }}
          cache-suffix: extended-tests-${{ matrix.job-configs.working-directory }}
          working-directory: ${{ matrix.job-configs.working-directory }}
      - name: "📦 Install Dependencies & Run Extended Tests"
        shell: bash
        # Extra deps come from the package's extended_testing_deps.txt, installed
        # into the uv-managed venv before running the `extended_tests` make target.
        run: |
          echo "Running extended tests, installing dependencies with uv..."
          uv venv
          uv sync --group test
          VIRTUAL_ENV=.venv uv pip install -r extended_testing_deps.txt
          VIRTUAL_ENV=.venv make extended_tests
      - name: "🧹 Verify Clean Working Directory"
        shell: bash
        run: |
          set -eu
          STATUS="$(git status)"
          echo "$STATUS"
          # grep will exit non-zero if the target message isn't found,
          # and `set -e` above will cause the step to fail.
          echo "$STATUS" | grep 'nothing to commit, working tree clean'
  # Final status check - ensures all required jobs passed before allowing merge
  ci_success:
    name: "✅ CI Success"
    needs:
      [
        build,
        lint,
        test,
        compile-integration-tests,
        extended-tests,
        test-pydantic,
      ]
    # always() so this job runs (and reports a status) even when upstream
    # jobs failed or were skipped.
    if: |
      always()
    runs-on: ubuntu-latest
    env:
      JOBS_JSON: ${{ toJSON(needs) }}
      RESULTS_JSON: ${{ toJSON(needs.*.result) }}
      # Succeed only when no needed job failed or was cancelled. "skipped" is
      # acceptable: jobs are skipped when no matching packages changed.
      EXIT_CODE: ${{!contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') && '0' || '1'}}
    steps:
      - name: "🎉 All Checks Passed"
        run: |
          echo $JOBS_JSON
          echo $RESULTS_JSON
          echo "Exiting with $EXIT_CODE"
          exit $EXIT_CODE
================================================
FILE: .github/workflows/close_unchecked_issues.yml
================================================
# Auto-close issues that bypass or ignore the issue template checkboxes.
#
# GitHub issue forms enforce `required: true` checkboxes in the web UI,
# but the API bypasses form validation entirely — bots/scripts can open
# issues with every box unchecked or skip the template altogether.
#
# Rules:
#   1. Checkboxes present, none checked → close
#   2. No checkboxes at all → close unless author is an org member or bot
#
# Org membership check reuses the shared helper from pr-labeler.js and
# the same GitHub App used by tag-external-issues.yml.
name: Close Unchecked Issues
on:
  issues:
    types: [opened]
permissions:
  contents: read
concurrency:
  group: ${{ github.workflow }}-${{ github.event.issue.number }}
  cancel-in-progress: true
jobs:
  check-boxes:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      issues: write
    steps:
      # Checkout is needed so the script can require ./.github/scripts/pr-labeler.js
      - uses: actions/checkout@v6
      - name: Generate GitHub App token
        id: app-token
        uses: actions/create-github-app-token@v3
        with:
          app-id: ${{ secrets.ORG_MEMBERSHIP_APP_ID }}
          private-key: ${{ secrets.ORG_MEMBERSHIP_APP_PRIVATE_KEY }}
      - name: Validate issue checkboxes
        if: steps.app-token.outcome == 'success'
        uses: actions/github-script@v8
        with:
          # App token (not GITHUB_TOKEN) so private org membership can be read.
          github-token: ${{ steps.app-token.outputs.token }}
          script: |
            const body = context.payload.issue.body ?? '';
            // Count "- [x]" markers (case-insensitive): any checked box passes.
            const checked = (body.match(/- \[x\]/gi) || []).length;
            if (checked > 0) {
              console.log(`Found ${checked} checked checkbox(es) — OK`);
              return;
            }
            const unchecked = (body.match(/- \[ \]/g) || []).length;
            // No checkboxes at all — allow org members and bots, close everyone else
            if (unchecked === 0) {
              const { owner, repo } = context.repo;
              const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
              const author = context.payload.sender.login;
              const { isExternal } = await h.checkMembership(
                author, context.payload.sender.type,
              );
              if (!isExternal) {
                console.log(`No checkboxes, but ${author} is internal — OK`);
                return;
              }
              console.log(`No checkboxes and ${author} is external — closing`);
            } else {
              console.log(`Found 0 checked and ${unchecked} unchecked checkbox(es) — closing`);
            }
            const { owner, repo } = context.repo;
            const issue_number = context.payload.issue.number;
            const reason = unchecked > 0
              ? 'none of the required checkboxes were checked'
              : 'no issue template was used';
            // Close before commenting — a closed issue without a comment is
            // less confusing than an open issue with a false "auto-closed" message
            // if the second API call fails.
            await github.rest.issues.update({
              owner,
              repo,
              issue_number,
              state: 'closed',
              state_reason: 'not_planned',
            });
            await github.rest.issues.createComment({
              owner,
              repo,
              issue_number,
              body: [
                `This issue was automatically closed because ${reason}.`,
                '',
                `Please use one of the [issue templates](https://github.com/${owner}/${repo}/issues/new/choose) and complete the checklist.`,
              ].join('\n'),
            });
================================================
FILE: .github/workflows/codspeed.yml
================================================
# CodSpeed performance benchmarks.
#
# Runs benchmarks on changed packages and uploads results to CodSpeed.
# Separated from the main CI workflow so that push-to-master baseline runs
# are never cancelled by subsequent merges (cancel-in-progress is only
# enabled for pull_request events).
name: "⚡ CodSpeed"
on:
  push:
    branches: [master]
  pull_request:
# On PRs, cancel stale runs when new commits are pushed.
# On push-to-master, never cancel — these runs populate CodSpeed baselines.
concurrency:
  # Keyed by commit SHA on push so every master commit gets its own group.
  group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.sha || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
permissions:
  contents: read
env:
  UV_FROZEN: "true"
  UV_NO_SYNC: "true"
jobs:
  build:
    name: "Detect Changes"
    runs-on: ubuntu-latest
    # PRs labeled `codspeed-ignore` skip benchmarking entirely.
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'codspeed-ignore') }}
    steps:
      - name: "📋 Checkout Code"
        uses: actions/checkout@v6
      - name: "🐍 Setup Python 3.11"
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"
      - name: "📂 Get Changed Files"
        id: files
        uses: Ana06/get-changed-files@25f79e676e7ea1868813e21465014798211fad8c # v2.3.0
      - name: "🔍 Analyze Changed Files"
        id: set-matrix
        run: |
          python -m pip install packaging requests
          python .github/scripts/check_diff.py ${{ steps.files.outputs.all }} >> $GITHUB_OUTPUT
    outputs:
      codspeed: ${{ steps.set-matrix.outputs.codspeed }}
  benchmarks:
    name: "⚡ CodSpeed Benchmarks"
    needs: [build]
    if: ${{ needs.build.outputs.codspeed != '[]' }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        job-configs: ${{ fromJson(needs.build.outputs.codspeed) }}
      fail-fast: false
    steps:
      - uses: actions/checkout@v6
      - name: "📦 Install UV Package Manager"
        uses: astral-sh/setup-uv@0ca8f610542aa7f4acaf39e65cf4eb3c35091883 # v7
        with:
          # Pinned to 3.13.11 to work around CodSpeed walltime segfault on 3.13.12+
          # See: https://github.com/CodSpeedHQ/pytest-codspeed/issues/106
          python-version: "3.13.11"
      - name: "📦 Install Test Dependencies"
        run: uv sync --group test
        working-directory: ${{ matrix.job-configs.working-directory }}
      - name: "⚡ Run Benchmarks: ${{ matrix.job-configs.working-directory }}"
        uses: CodSpeedHQ/action@a50965600eafa04edcd6717761f55b77e52aafbd # v4
        with:
          token: ${{ secrets.CODSPEED_TOKEN }}
          # libs/core keeps benchmarks in a dedicated tests/benchmarks dir;
          # other packages mark them with `-m benchmark` inside unit_tests.
          run: |
            cd ${{ matrix.job-configs.working-directory }}
            if [ "${{ matrix.job-configs.working-directory }}" = "libs/core" ]; then
              uv run --no-sync pytest ./tests/benchmarks --codspeed
            else
              uv run --no-sync pytest ./tests/unit_tests/ -m benchmark --codspeed
            fi
          mode: ${{ matrix.job-configs.codspeed-mode }}
================================================
FILE: .github/workflows/integration_tests.yml
================================================
# Routine integration tests against partner libraries with live API credentials.
#
# Uses `make integration_tests` within each library being tested.
#
# Runs daily with the option to trigger manually.
name: "⏰ Integration Tests"
run-name: "Run Integration Tests - ${{ inputs.working-directory-force || 'all libs' }} (Python ${{ inputs.python-version-force || '3.10, 3.13' }})"
on:
  workflow_dispatch:
    inputs:
      working-directory-force:
        type: string
        description: "From which folder this pipeline executes - defaults to all in matrix - example value: libs/partners/anthropic"
      python-version-force:
        type: string
        description: "Python version to use - defaults to 3.10 and 3.13 in matrix - example value: 3.11"
  schedule:
    - cron: "0 13 * * *" # Runs daily at 1PM UTC (9AM EDT/6AM PDT)
permissions:
  contents: read
env:
  UV_FROZEN: "true"
  # Default set of libraries tested, as a JSON array consumed by compute-matrix.
  # Includes external repos (google, aws) that are checked out separately below.
  DEFAULT_LIBS: >-
    ["libs/partners/openai",
     "libs/partners/anthropic",
     "libs/partners/fireworks",
     "libs/partners/groq",
     "libs/partners/mistralai",
     "libs/partners/xai",
     "libs/partners/google-vertexai",
     "libs/partners/google-genai",
     "libs/partners/aws"]
jobs:
# Generate dynamic test matrix based on input parameters or defaults
# Only runs on the main repo (for scheduled runs) or when manually triggered
compute-matrix:
# Defend against forks running scheduled jobs, but allow manual runs from forks
if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
runs-on: ubuntu-latest
name: "📋 Compute Test Matrix"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
python-version-min-3-11: ${{ steps.set-matrix.outputs.python-version-min-3-11 }}
steps:
- name: "🔢 Generate Python & Library Matrix"
id: set-matrix
env:
DEFAULT_LIBS: ${{ env.DEFAULT_LIBS }}
WORKING_DIRECTORY_FORCE: ${{ github.event.inputs.working-directory-force || '' }}
PYTHON_VERSION_FORCE: ${{ github.event.inputs.python-version-force || '' }}
run: |
# echo "matrix=..." where matrix is a json formatted str with keys python-version and working-directory
# python-version should default to 3.10 and 3.13, but is overridden to [PYTHON_VERSION_FORCE] if set
# working-directory should default to DEFAULT_LIBS, but is overridden to [WORKING_DIRECTORY_FORCE] if set
python_version='["3.10", "3.13"]'
python_version_min_3_11='["3.11", "3.13"]'
working_directory="$DEFAULT_LIBS"
if [ -n "$PYTHON_VERSION_FORCE" ]; then
python_version="[\"$PYTHON_VERSION_FORCE\"]"
# Bound forced version to >= 3.11 for packages requiring it
if [ "$(echo "$PYTHON_VERSION_FORCE >= 3.11" | bc -l)" -eq 1 ]; then
python_version_min_3_11="[\"$PYTHON_VERSION_FORCE\"]"
else
python_version_min_3_11='["3.11"]'
fi
fi
if [ -n "$WORKING_DIRECTORY_FORCE" ]; then
working_directory="[\"$WORKING_DIRECTORY_FORCE\"]"
fi
matrix="{\"python-version\": $python_version, \"working-directory\": $working_directory}"
echo $matrix
echo "matrix=$matrix" >> $GITHUB_OUTPUT
echo "python-version-min-3-11=$python_version_min_3_11" >> $GITHUB_OUTPUT
  # Run integration tests against partner libraries with live API credentials
  integration-tests:
    if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
    name: "🐍 Python ${{ matrix.python-version }}: ${{ matrix.working-directory }}"
    runs-on: ubuntu-latest
    needs: [compute-matrix]
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        python-version: ${{ fromJSON(needs.compute-matrix.outputs.matrix).python-version }}
        working-directory: ${{ fromJSON(needs.compute-matrix.outputs.matrix).working-directory }}
    steps:
      # Monorepo goes under ./langchain so external repos can sit alongside it.
      - uses: actions/checkout@v6
        with:
          path: langchain
      # These libraries exist outside of the monorepo and need to be checked out separately
      - uses: actions/checkout@v6
        with:
          repository: langchain-ai/langchain-google
          path: langchain-google
      - name: "🔐 Authenticate to Google Cloud"
        id: "auth"
        uses: google-github-actions/auth@7c6bc770dae815cd3e89ee6cdf493a5fab2cc093 # v3
        with:
          credentials_json: "${{ secrets.GOOGLE_CREDENTIALS }}"
      - uses: actions/checkout@v6
        with:
          repository: langchain-ai/langchain-aws
          path: langchain-aws
      - name: "🔐 Configure AWS Credentials"
        uses: aws-actions/configure-aws-credentials@fb7eb401298e393da51cdcb2feb1ed0183619014 # v6
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      # Move external packages into the monorepo layout so the matrix
      # working-directory paths resolve uniformly for all libraries.
      - name: "📦 Organize External Libraries"
        run: |
          rm -rf \
            langchain/libs/partners/google-genai \
            langchain/libs/partners/google-vertexai
          mv langchain-google/libs/genai langchain/libs/partners/google-genai
          mv langchain-google/libs/vertexai langchain/libs/partners/google-vertexai
          mv langchain-aws/libs/aws langchain/libs/partners/aws
      - name: "🐍 Set up Python ${{ matrix.python-version }} + UV"
        uses: "./langchain/.github/actions/uv_setup"
        with:
          python-version: ${{ matrix.python-version }}
      - name: "📦 Install Dependencies"
        # Partner packages use [tool.uv.sources] in their pyproject.toml to resolve
        # langchain-core/langchain to local editable installs, so `uv sync` automatically
        # tests against the versions from the current branch (not published releases).
        # TODO: external google/aws don't have local resolution since they live in
        # separate repos, so they pull `core`/`langchain_v1` from PyPI. We should update
        # their dev groups to use git source dependencies pointing to the current
        # branch's latest commit SHA to fully test against local langchain changes.
        run: |
          echo "Running scheduled tests, installing dependencies with uv..."
          cd langchain/${{ matrix.working-directory }}
          uv sync --group test --group test_integration
      - name: "🚀 Run Integration Tests"
        # WARNING: All secrets below are available to every matrix job regardless of
        # which package is being tested. This is intentional for simplicity, but means
        # any test file could technically access any key. Only use for trusted code.
        env:
          LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }}
          AI21_API_KEY: ${{ secrets.AI21_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          ANTHROPIC_FILES_API_IMAGE_ID: ${{ secrets.ANTHROPIC_FILES_API_IMAGE_ID }}
          ANTHROPIC_FILES_API_PDF_ID: ${{ secrets.ANTHROPIC_FILES_API_PDF_ID }}
          ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }}
          ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }}
          ASTRA_DB_KEYSPACE: ${{ secrets.ASTRA_DB_KEYSPACE }}
          AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
          AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
          AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
          AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
          AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME }}
          AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
          AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
          COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
          DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
          ES_URL: ${{ secrets.ES_URL }}
          ES_CLOUD_ID: ${{ secrets.ES_CLOUD_ID }}
          ES_API_KEY: ${{ secrets.ES_API_KEY }}
          EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
          FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
          MONGODB_ATLAS_URI: ${{ secrets.MONGODB_ATLAS_URI }}
          NOMIC_API_KEY: ${{ secrets.NOMIC_API_KEY }}
          NVIDIA_API_KEY: ${{ secrets.NVIDIA_API_KEY }}
          OLLAMA_API_KEY: ${{ secrets.OLLAMA_API_KEY }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
          PPLX_API_KEY: ${{ secrets.PPLX_API_KEY }}
          TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
          UPSTAGE_API_KEY: ${{ secrets.UPSTAGE_API_KEY }}
          WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }}
          WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }}
          XAI_API_KEY: ${{ secrets.XAI_API_KEY }}
        run: |
          cd langchain/${{ matrix.working-directory }}
          make integration_tests
      - name: "🧹 Clean up External Libraries"
        # Clean up external libraries to avoid affecting the following git status check
        run: |
          rm -rf \
            langchain/libs/partners/google-genai \
            langchain/libs/partners/google-vertexai \
            langchain/libs/partners/aws
      - name: "🧹 Verify Clean Working Directory"
        working-directory: langchain
        run: |
          set -eu
          STATUS="$(git status)"
          echo "$STATUS"
          # grep will exit non-zero if the target message isn't found,
          # and `set -e` above will cause the step to fail.
          echo "$STATUS" | grep 'nothing to commit, working tree clean'
  # Test dependent packages against local packages to catch breaking changes
  test-dependents:
    # Defend against forks running scheduled jobs, but allow manual runs from forks
    if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
    name: "🐍 Python ${{ matrix.python-version }}: ${{ matrix.package.path }}"
    runs-on: ubuntu-latest
    needs: [compute-matrix]
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        # deepagents requires Python >= 3.11, use bounded version from compute-matrix
        python-version: ${{ fromJSON(needs.compute-matrix.outputs.python-version-min-3-11) }}
        package:
          - name: deepagents
            repo: langchain-ai/deepagents
            path: libs/deepagents
    steps:
      - uses: actions/checkout@v6
        with:
          path: langchain
      # Check out the dependent package's own repository next to the monorepo.
      - uses: actions/checkout@v6
        with:
          repository: ${{ matrix.package.repo }}
          path: ${{ matrix.package.name }}
      - name: "🐍 Set up Python ${{ matrix.python-version }} + UV"
        uses: "./langchain/.github/actions/uv_setup"
        with:
          python-version: ${{ matrix.python-version }}
      - name: "📦 Install ${{ matrix.package.name }} with Local"
        # Unlike partner packages (which use [tool.uv.sources] for local resolution),
        # external dependents live in separate repos and need explicit overrides to
        # test against the langchain versions from the current branch, as their
        # pyproject.toml files point to released versions.
        run: |
          cd ${{ matrix.package.name }}/${{ matrix.package.path }}
          # Install the package with test dependencies
          uv sync --group test
          # Override langchain packages with local versions
          uv pip install \
            -e $GITHUB_WORKSPACE/langchain/libs/core \
            -e $GITHUB_WORKSPACE/langchain/libs/langchain_v1
      # No API keys needed for now - deepagents `make test` only runs unit tests
      - name: "🚀 Run ${{ matrix.package.name }} Tests"
        run: |
          cd ${{ matrix.package.name }}/${{ matrix.package.path }}
          make test
================================================
FILE: .github/workflows/pr_labeler.yml
================================================
# Unified PR labeler — applies size, file-based, title-based, and
# contributor classification labels in a single sequential workflow.
#
# Consolidates pr_labeler_file.yml, pr_labeler_title.yml,
# pr_size_labeler.yml, and PR-handling from tag-external-contributions.yml
# into one workflow to eliminate race conditions from concurrent label
# mutations. tag-external-issues.yml remains active for issue-only
# labeling. Backfill lives in pr_labeler_backfill.yml.
#
# Config and shared logic live in .github/scripts/pr-labeler-config.json
# and .github/scripts/pr-labeler.js — update those when adding partners.
#
# Setup Requirements:
# 1. Create a GitHub App with permissions:
# - Repository: Pull requests (write)
# - Repository: Issues (write)
# - Organization: Members (read)
# 2. Install the app on your organization and this repository
# 3. Add these repository secrets:
# - ORG_MEMBERSHIP_APP_ID: Your app's ID
# - ORG_MEMBERSHIP_APP_PRIVATE_KEY: Your app's private key
#
# The GitHub App token is required to check private organization membership
# and to propagate label events to downstream workflows.
name: "🏷️ PR Labeler"
on:
  # Safe since we're not checking out or running the PR's code.
  # NEVER CHECK OUT UNTRUSTED CODE FROM A PR's HEAD IN A pull_request_target JOB.
  # Doing so would allow attackers to execute arbitrary code in the context of your repository.
  pull_request_target:
    types: [opened, synchronize, reopened, edited]
permissions:
  contents: read
concurrency:
  # Separate opened events so external/tier labels are never lost to cancellation
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-${{ github.event.action == 'opened' && 'opened' || 'update' }}
  # Only 'opened' runs are protected from cancellation; later runs of the
  # same PR supersede each other.
  cancel-in-progress: ${{ github.event.action != 'opened' }}
jobs:
  label:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
      issues: write
    steps:
      # Checks out the BASE branch (safe for pull_request_target — never
      # the PR head). Needed to load .github/scripts/pr-labeler*.
      - uses: actions/checkout@v6
      # App token is only minted on 'opened' — contributor classification
      # happens once per PR; later events reuse the labels already applied.
      - name: Generate GitHub App token
        if: github.event.action == 'opened'
        id: app-token
        uses: actions/create-github-app-token@v3
        with:
          app-id: ${{ secrets.ORG_MEMBERSHIP_APP_ID }}
          private-key: ${{ secrets.ORG_MEMBERSHIP_APP_PRIVATE_KEY }}
      # Fail fast with an explicit error instead of letting a later API call
      # die with an opaque 401 when the secrets are missing/misconfigured.
      - name: Verify App token
        if: github.event.action == 'opened'
        run: |
          if [ -z "${{ steps.app-token.outputs.token }}" ]; then
            echo "::error::GitHub App token generation failed — cannot classify contributor"
            exit 1
          fi
      - name: Check org membership
        if: github.event.action == 'opened'
        id: check-membership
        uses: actions/github-script@v8
        with:
          # App token required: default GITHUB_TOKEN cannot see private
          # organization membership.
          github-token: ${{ steps.app-token.outputs.token }}
          script: |
            const { owner, repo } = context.repo;
            const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
            // On 'opened' the event sender is always the PR author.
            const author = context.payload.sender.login;
            const { isExternal } = await h.checkMembership(
              author, context.payload.sender.type,
            );
            core.setOutput('is-external', isExternal ? 'true' : 'false');
      # Runs on every trigger type. Computes size/file/title labels, removes
      # stale managed labels, and applies everything in a single batch.
      - name: Apply PR labels
        uses: actions/github-script@v8
        env:
          IS_EXTERNAL: ${{ steps.check-membership.outputs.is-external }}
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const { owner, repo } = context.repo;
            const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
            const pr = context.payload.pull_request;
            if (!pr) return;
            const prNumber = pr.number;
            const action = context.payload.action;
            const toAdd = new Set();
            const toRemove = new Set();
            // Snapshot of the PR's current labels (paginated defensively).
            const currentLabels = (await github.paginate(
              github.rest.issues.listLabelsOnIssue,
              { owner, repo, issue_number: prNumber, per_page: 100 },
            )).map(l => l.name ?? '');
            // ── Size + file labels (skip on 'edited' — files unchanged) ──
            if (action !== 'edited') {
              for (const sl of h.sizeLabels) await h.ensureLabel(sl);
              const files = await github.paginate(github.rest.pulls.listFiles, {
                owner, repo, pull_number: prNumber, per_page: 100,
              });
              const { totalChanged, sizeLabel } = h.computeSize(files);
              toAdd.add(sizeLabel);
              // Keep exactly one size label: drop any other currently present.
              for (const sl of h.sizeLabels) {
                if (currentLabels.includes(sl) && sl !== sizeLabel) toRemove.add(sl);
              }
              console.log(`Size: ${totalChanged} changed lines → ${sizeLabel}`);
              for (const label of h.matchFileLabels(files)) {
                toAdd.add(label);
              }
            }
            // ── Title-based labels ──
            const { labels: titleLabels, typeLabel } = h.matchTitleLabels(pr.title || '');
            for (const label of titleLabels) toAdd.add(label);
            // Remove stale type labels only when a type was detected
            if (typeLabel) {
              for (const tl of h.allTypeLabels) {
                if (currentLabels.includes(tl) && !titleLabels.has(tl)) toRemove.add(tl);
              }
            }
            // ── Internal label (only on open, non-external contributors) ──
            // IS_EXTERNAL is empty string on non-opened events (step didn't
            // run), so this guard is only true for opened + internal.
            if (action === 'opened' && process.env.IS_EXTERNAL === 'false') {
              toAdd.add('internal');
            }
            // ── Apply changes ──
            // Ensure all labels we're about to add exist (addLabels returns
            // 422 if any label in the batch is missing, which would prevent
            // ALL labels from being applied).
            for (const name of toAdd) {
              await h.ensureLabel(name);
            }
            // Remove before add; anything also being added is left alone.
            for (const name of toRemove) {
              if (toAdd.has(name)) continue;
              try {
                await github.rest.issues.removeLabel({
                  owner, repo, issue_number: prNumber, name,
                });
              } catch (e) {
                // 404 = label already gone (race with another run) — fine.
                if (e.status !== 404) throw e;
              }
            }
            const addList = [...toAdd];
            if (addList.length > 0) {
              await github.rest.issues.addLabels({
                owner, repo, issue_number: prNumber, labels: addList,
              });
            }
            const removed = [...toRemove].filter(r => !toAdd.has(r));
            console.log(`PR #${prNumber}: +[${addList.join(', ')}] -[${removed.join(', ')}]`);
      # Apply tier label BEFORE the external label so that
      # "trusted-contributor" is already present when the "external" labeled
      # event fires and triggers require_issue_link.yml.
      - name: Apply contributor tier label
        if: github.event.action == 'opened' && steps.check-membership.outputs.is-external == 'true'
        uses: actions/github-script@v8
        with:
          github-token: ${{ steps.app-token.outputs.token }}
          script: |
            const { owner, repo } = context.repo;
            const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
            const pr = context.payload.pull_request;
            await h.applyTierLabel(pr.number, pr.user.login);
      # Must run LAST (see ordering note above): adding 'external' is the
      # trigger downstream enforcement listens for.
      - name: Add external label
        if: github.event.action == 'opened' && steps.check-membership.outputs.is-external == 'true'
        uses: actions/github-script@v8
        with:
          # Use App token so the "labeled" event propagates to downstream
          # workflows (e.g. require_issue_link.yml). Events created by the
          # default GITHUB_TOKEN do not trigger additional workflow runs.
          github-token: ${{ steps.app-token.outputs.token }}
          script: |
            const { owner, repo } = context.repo;
            const prNumber = context.payload.pull_request.number;
            const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
            await h.ensureLabel('external');
            await github.rest.issues.addLabels({
              owner, repo,
              issue_number: prNumber,
              labels: ['external'],
            });
            console.log(`Added 'external' label to PR #${prNumber}`);
================================================
FILE: .github/workflows/pr_labeler_backfill.yml
================================================
# Backfill PR labels on all open PRs.
#
# Manual-only workflow that applies the same labels as pr_labeler.yml
# (size, file, title, contributor classification) to existing open PRs.
# Reuses shared logic from .github/scripts/pr-labeler.js.
name: "🏷️ PR Labeler Backfill"
on:
  workflow_dispatch:
    inputs:
      max_items:
        description: "Maximum number of open PRs to process"
        default: "100"
        type: string
permissions:
  contents: read
jobs:
  backfill:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
      issues: write
    steps:
      - uses: actions/checkout@v6
      - name: Generate GitHub App token
        id: app-token
        uses: actions/create-github-app-token@v3
        with:
          app-id: ${{ secrets.ORG_MEMBERSHIP_APP_ID }}
          private-key: ${{ secrets.ORG_MEMBERSHIP_APP_PRIVATE_KEY }}
      - name: Backfill labels on open PRs
        uses: actions/github-script@v8
        env:
          # SECURITY: pass the dispatch input through the environment instead
          # of interpolating `${{ inputs.max_items }}` directly inside the
          # script body. Inline `${{ }}` expressions are substituted into the
          # JavaScript SOURCE TEXT before it runs, so a crafted input value
          # could inject arbitrary code into this privileged script.
          MAX_ITEMS: ${{ inputs.max_items }}
        with:
          github-token: ${{ steps.app-token.outputs.token }}
          script: |
            const { owner, repo } = context.repo;
            // Read via env (never inline-interpolated) and validate strictly.
            const rawMax = process.env.MAX_ITEMS || '';
            const maxItems = parseInt(rawMax, 10);
            if (isNaN(maxItems) || maxItems <= 0) {
              core.setFailed(`Invalid max_items: "${rawMax}" — must be a positive integer`);
              return;
            }
            const { h } = require('./.github/scripts/pr-labeler.js').loadAndInit(github, owner, repo, core);
            // Pre-create the fixed label sets once, outside the per-PR loop.
            for (const name of [...h.sizeLabels, ...h.tierLabels]) {
              await h.ensureLabel(name);
            }
            // Cache contributor classification — many PRs share an author.
            const contributorCache = new Map();
            const fileRules = h.buildFileRules();
            const prs = await github.paginate(github.rest.pulls.list, {
              owner, repo, state: 'open', per_page: 100,
            });
            let processed = 0;
            let failures = 0;
            for (const pr of prs) {
              if (processed >= maxItems) break;
              try {
                const author = pr.user.login;
                const info = await h.getContributorInfo(contributorCache, author, pr.user.type);
                const labels = new Set();
                labels.add(info.isExternal ? 'external' : 'internal');
                // Tier: trusted if at/above threshold, new if zero merged PRs.
                if (info.isExternal && info.mergedCount != null && info.mergedCount >= h.trustedThreshold) {
                  labels.add('trusted-contributor');
                } else if (info.isExternal && info.mergedCount === 0) {
                  labels.add('new-contributor');
                }
                // Size + file labels
                const files = await github.paginate(github.rest.pulls.listFiles, {
                  owner, repo, pull_number: pr.number, per_page: 100,
                });
                const { sizeLabel } = h.computeSize(files);
                labels.add(sizeLabel);
                for (const label of h.matchFileLabels(files, fileRules)) {
                  labels.add(label);
                }
                // Title labels
                const { labels: titleLabels } = h.matchTitleLabels(pr.title ?? '');
                for (const tl of titleLabels) labels.add(tl);
                // Ensure all labels exist before batch add (addLabels 422s
                // on any missing label, dropping the whole batch).
                for (const name of labels) {
                  await h.ensureLabel(name);
                }
                // Remove stale managed labels
                const currentLabels = (await github.paginate(
                  github.rest.issues.listLabelsOnIssue,
                  { owner, repo, issue_number: pr.number, per_page: 100 },
                )).map(l => l.name ?? '');
                const managed = [...h.sizeLabels, ...h.tierLabels, ...h.allTypeLabels];
                for (const name of currentLabels) {
                  if (managed.includes(name) && !labels.has(name)) {
                    try {
                      await github.rest.issues.removeLabel({
                        owner, repo, issue_number: pr.number, name,
                      });
                    } catch (e) {
                      // 404 = already removed by a concurrent run — ignore.
                      if (e.status !== 404) throw e;
                    }
                  }
                }
                await github.rest.issues.addLabels({
                  owner, repo, issue_number: pr.number, labels: [...labels],
                });
                console.log(`PR #${pr.number} (${author}): ${[...labels].join(', ')}`);
                processed++;
              } catch (e) {
                // One bad PR must not abort the whole backfill.
                failures++;
                core.warning(`Failed to process PR #${pr.number}: ${e.message}`);
              }
            }
            console.log(`\nBackfill complete. Processed ${processed} PRs, ${failures} failures. ${contributorCache.size} unique authors.`);
================================================
FILE: .github/workflows/pr_lint.yml
================================================
# PR title linting.
#
# FORMAT (Conventional Commits 1.0.0):
#
# <type>[optional scope]: <description>
# [optional body]
# [optional footer(s)]
#
# Examples:
#   feat(core): add multi-tenant support
# fix(langchain): resolve error
# docs: update API usage examples
# docs(openai): update API usage examples
#
# Allowed Types:
# * feat — a new feature (MINOR)
# * fix — a bug fix (PATCH)
# * docs — documentation only changes
# * style — formatting, linting, etc.; no code change or typing refactors
# * refactor — code change that neither fixes a bug nor adds a feature
# * perf — code change that improves performance
#   * test — adding tests or correcting existing tests
# * build — changes that affect the build system/external dependencies
# * ci — continuous integration/configuration changes
# * chore — other changes that don't modify source or test files
# * revert — reverts a previous commit
# * release — prepare a new release
# * hotfix — urgent fix
#
# Allowed Scope(s) (optional):
#    core, langchain, langchain-classic, model-profiles,
#    standard-tests, text-splitters, docs, anthropic, chroma, deepseek, exa,
#    fireworks, groq, huggingface, mistralai, nomic, ollama, openai, openrouter,
#    perplexity, qdrant, xai, infra, deps, partners
#
# Multiple scopes can be used by separating them with a comma. For example:
#
#   feat(core,langchain): add multi-tenant support to core and langchain
#
# Note: PRs touching the langchain package should use the 'langchain' scope. It is not
# acceptable to omit the scope for changes to the langchain package, despite it being
# the main package & name of the repo.
#
# Rules:
# 1. The 'Type' must start with a lowercase letter.
# 2. Breaking changes: append "!" after type/scope (e.g., feat!: drop x support)
# 3. When releasing (updating the pyproject.toml and uv.lock), the commit message
# should be: `release(scope): x.y.z` (e.g., `release(core): 1.2.0` with no
#      body, footer, or preceding/succeeding text).
#
# Enforces Conventional Commits format for pull request titles to maintain a clear and
# machine-readable change history.
name: "🏷️ PR Title Lint"
permissions:
  pull-requests: read
on:
  pull_request:
    types: [opened, edited, synchronize]
jobs:
  # Validates that PR title follows Conventional Commits 1.0.0 specification
  lint-pr-title:
    name: "validate format"
    runs-on: ubuntu-latest
    steps:
      # Reject `type(): ...` titles (empty scope parentheses) up front with a
      # targeted error message before the generic format check runs.
      - name: "🚫 Reject empty scope"
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
        run: |
          if [[ "$PR_TITLE" =~ ^[a-z]+\(\)[!]?: ]]; then
            echo "::error::PR title has empty scope parentheses: '$PR_TITLE'"
            echo "Either remove the parentheses or provide a scope (e.g., 'fix(core): ...')."
            exit 1
          fi
      # Pinned by commit SHA; comment tracks the tag for readability.
      - name: "✅ Validate Conventional Commits Format"
        uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          types: |
            feat
            fix
            docs
            style
            refactor
            perf
            test
            build
            ci
            chore
            revert
            release
            hotfix
          scopes: |
            core
            langchain
            langchain-classic
            model-profiles
            standard-tests
            text-splitters
            docs
            anthropic
            chroma
            deepseek
            exa
            fireworks
            groq
            huggingface
            mistralai
            nomic
            ollama
            openai
            openrouter
            perplexity
            qdrant
            xai
            infra
            deps
            partners
          requireScope: false
          # Disallow 'release' as a scope and any all-uppercase scope.
          disallowScopes: |
            release
            [A-Z]+
          ignoreLabels: |
            ignore-lint-pr-title
================================================
FILE: .github/workflows/refresh_model_profiles.yml
================================================
# Refreshes model profile data for all in-monorepo partner integrations by
# pulling the latest metadata from models.dev via the `langchain-profiles` CLI.
#
# Creates a pull request with any changes. Runs daily and can be triggered
# manually from the Actions UI. Uses a fixed branch so each run supersedes
# any stale PR from a previous run.
name: "🔄 Refresh Model Profiles"
on:
  schedule:
    - cron: "0 8 * * *" # daily at 08:00 UTC
  workflow_dispatch:
permissions:
  contents: write
  pull-requests: write
jobs:
  # Thin wrapper: all logic lives in the reusable _refresh_model_profiles.yml.
  refresh-profiles:
    uses: ./.github/workflows/_refresh_model_profiles.yml
    with:
      # JSON array consumed by the reusable workflow's matrix; each entry maps
      # a models.dev provider id to the partner package's data directory.
      providers: >-
        [
          {"provider":"anthropic", "data_dir":"libs/partners/anthropic/langchain_anthropic/data"},
          {"provider":"deepseek", "data_dir":"libs/partners/deepseek/langchain_deepseek/data"},
          {"provider":"fireworks-ai", "data_dir":"libs/partners/fireworks/langchain_fireworks/data"},
          {"provider":"groq", "data_dir":"libs/partners/groq/langchain_groq/data"},
          {"provider":"huggingface", "data_dir":"libs/partners/huggingface/langchain_huggingface/data"},
          {"provider":"mistral", "data_dir":"libs/partners/mistralai/langchain_mistralai/data"},
          {"provider":"openai", "data_dir":"libs/partners/openai/langchain_openai/data"},
          {"provider":"openrouter", "data_dir":"libs/partners/openrouter/langchain_openrouter/data"},
          {"provider":"perplexity", "data_dir":"libs/partners/perplexity/langchain_perplexity/data"},
          {"provider":"xai", "data_dir":"libs/partners/xai/langchain_xai/data"}
        ]
      cli-path: libs/model-profiles
      # Only generated profile files are staged into the PR.
      add-paths: libs/partners/**/data/_profiles.py
      pr-body: |
        Automated refresh of model profile data for all in-monorepo partner
        integrations via `langchain-profiles refresh`.
        🤖 Generated by the `refresh_model_profiles` workflow.
    secrets:
      MODEL_PROFILE_BOT_APP_ID: ${{ secrets.MODEL_PROFILE_BOT_APP_ID }}
      MODEL_PROFILE_BOT_PRIVATE_KEY: ${{ secrets.MODEL_PROFILE_BOT_PRIVATE_KEY }}
================================================
FILE: .github/workflows/reopen_on_assignment.yml
================================================
# Reopen PRs that were auto-closed by require_issue_link.yml when the
# contributor was not assigned to the linked issue. When a maintainer
# assigns the contributor to the issue, this workflow finds matching
# closed PRs, verifies the issue link, and reopens them.
#
# Uses the default GITHUB_TOKEN (not a PAT or app token) so that the
# reopen and label-removal events do NOT re-trigger other workflows.
# GitHub suppresses events created by the default GITHUB_TOKEN within
# workflow runs to prevent infinite loops.
name: Reopen PR on Issue Assignment
on:
  issues:
    types: [assigned]
permissions:
  contents: read
jobs:
  reopen-linked-prs:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: Find and reopen matching PRs
        uses: actions/github-script@v8
        with:
          script: |
            const { owner, repo } = context.repo;
            const issueNumber = context.payload.issue.number;
            const assignee = context.payload.assignee.login;
            console.log(
              `Issue #${issueNumber} assigned to ${assignee} — searching for closed PRs to reopen`,
            );
            // Search for closed PRs by this assignee that were auto-closed
            // (identified by the missing-issue-link label) in this repo.
            const q = [
              `is:pr`,
              `is:closed`,
              `author:${assignee}`,
              `label:missing-issue-link`,
              `repo:${owner}/${repo}`,
            ].join(' ');
            let data;
            try {
              ({ data } = await github.rest.search.issuesAndPullRequests({
                q,
                per_page: 30,
              }));
            } catch (e) {
              // Surface a descriptive error — search API failures here should
              // fail the run so it can be retried.
              throw new Error(
                `Failed to search for closed PRs to reopen after assigning ${assignee} ` +
                `to #${issueNumber} (HTTP ${e.status ?? 'unknown'}): ${e.message}`,
              );
            }
            if (data.total_count === 0) {
              console.log('No matching closed PRs found');
              return;
            }
            console.log(`Found ${data.total_count} candidate PR(s)`);
            // Must stay in sync with the identical pattern in require_issue_link.yml
            const pattern = /(?:close[sd]?|fix(?:e[sd])?|resolve[sd]?)\s*#(\d+)/gi;
            for (const item of data.items) {
              const prNumber = item.number;
              const body = item.body || '';
              // Only reopen PRs whose body actually references THIS issue.
              const matches = [...body.matchAll(pattern)];
              const referencedIssues = matches.map(m => parseInt(m[1], 10));
              if (!referencedIssues.includes(issueNumber)) {
                console.log(`PR #${prNumber} does not reference #${issueNumber} — skipping`);
                continue;
              }
              // Skip if already bypassed
              const labels = item.labels.map(l => l.name);
              if (labels.includes('bypass-issue-check')) {
                console.log(`PR #${prNumber} already has bypass-issue-check — skipping`);
                continue;
              }
              // Reopen first, remove label second — a closed PR that still has
              // missing-issue-link is recoverable; a closed PR with the label
              // stripped is invisible to both workflows.
              try {
                await github.rest.pulls.update({
                  owner,
                  repo,
                  pull_number: prNumber,
                  state: 'open',
                });
                console.log(`Reopened PR #${prNumber}`);
              } catch (e) {
                if (e.status === 422) {
                  // Head branch deleted — PR is unrecoverable. Notify the
                  // contributor so they know to open a new PR.
                  core.warning(`Cannot reopen PR #${prNumber}: head branch was likely deleted`);
                  try {
                    await github.rest.issues.createComment({
                      owner,
                      repo,
                      issue_number: prNumber,
                      body:
                        `You have been assigned to #${issueNumber}, but this PR could not be ` +
                        `reopened because the head branch has been deleted. Please open a new ` +
                        `PR referencing the issue.`,
                    });
                  } catch (commentErr) {
                    // Best-effort notification — log and move on.
                    core.warning(
                      `Also failed to post comment on PR #${prNumber}: ${commentErr.message}`,
                    );
                  }
                  continue;
                }
                // Transient errors (rate limit, 5xx) should fail the job so
                // the label is NOT removed and the run can be retried.
                throw e;
              }
              // Remove missing-issue-link label only after successful reopen
              try {
                await github.rest.issues.removeLabel({
                  owner,
                  repo,
                  issue_number: prNumber,
                  name: 'missing-issue-link',
                });
                console.log(`Removed missing-issue-link from PR #${prNumber}`);
              } catch (e) {
                // 404 = label already gone — acceptable.
                if (e.status !== 404) throw e;
              }
              // Minimize stale enforcement comment (best-effort;
              // sync w/ require_issue_link.yml minimize blocks)
              try {
                const marker = '<!-- require-issue-link -->';
                const comments = await github.paginate(
                  github.rest.issues.listComments,
                  { owner, repo, issue_number: prNumber, per_page: 100 },
                );
                const stale = comments.find(c => c.body && c.body.includes(marker));
                if (stale) {
                  // Comment minimization is GraphQL-only (no REST endpoint).
                  await github.graphql(`
                    mutation($id: ID!) {
                      minimizeComment(input: {subjectId: $id, classifier: OUTDATED}) {
                        minimizedComment { isMinimized }
                      }
                    }
                  `, { id: stale.node_id });
                  console.log(`Minimized stale enforcement comment ${stale.id} as outdated`);
                }
              } catch (e) {
                core.warning(`Could not minimize stale comment on PR #${prNumber}: ${e.message}`);
              }
            }
================================================
FILE: .github/workflows/require_issue_link.yml
================================================
# Require external PRs to reference an approved issue (e.g. Fixes #NNN) and
# the PR author to be assigned to that issue. On failure the PR is
# labeled "missing-issue-link", commented on, and closed.
#
# Maintainer override: an org member can reopen the PR or remove
# "missing-issue-link" — both add "bypass-issue-check" and reopen.
#
# Dependency: pr_labeler.yml must apply the "external" label first. This
# workflow does NOT trigger on "opened" (new PRs have no labels yet, so the
# gate would always skip).
name: Require Issue Link
on:
pull_request_target:
# NEVER CHECK OUT UNTRUSTED CODE FROM A PR's HEAD IN A pull_request_target JOB.
# Doing so would allow attackers to execute arbitrary code in the context of your repository.
types: [edited, reopened, labeled, unlabeled]
# ──────────────────────────────────────────────────────────────────────────────
# Enforcement gate: set to 'true' to activate the issue link requirement.
# When 'false', the workflow still runs the check logic (useful for dry-run
# visibility) but will NOT label, comment, close, or fail PRs.
# ──────────────────────────────────────────────────────────────────────────────
env:
ENFORCE_ISSUE_LINK: "true"
permissions:
contents: read
jobs:
check-issue-link:
# Run when the "external" label is added, on edit/reopen if already labeled,
# or when "missing-issue-link" is removed (triggers maintainer override check).
# Skip entirely when the PR already carries "trusted-contributor" or
# "bypass-issue-check".
if: >-
!contains(github.event.pull_request.labels.*.name, 'trusted-contributor') &&
!contains(github.event.pull_request.labels.*.name, 'bypass-issue-check') &&
(
(github.event.action == 'labeled' && github.event.label.name == 'external') ||
(github.event.action == 'unlabeled' && github.event.label.name == 'missing-issue-link' && contains(github.event.pull_request.labels.*.name, 'external')) ||
(github.event.action != 'labeled' && github.event.action != 'unlabeled' && contains(github.event.pull_request.labels.*.name, 'external'))
)
runs-on: ubuntu-latest
permissions:
actions: write
pull-requests: write
steps:
- name: Check for issue link and assignee
id: check-link
uses: actions/github-script@v8
with:
script: |
const { owner, repo } = context.repo;
const prNumber = context.payload.pull_request.number;
const action = context.payload.action;
// ── Helper: ensure a label exists, then add it to the PR ────────
async function ensureAndAddLabel(labelName, color) {
try {
await github.rest.issues.getLabel({ owner, repo, name: labelName });
gitextract_cpd1yxs9/
├── .devcontainer/
│ ├── README.md
│ ├── devcontainer.json
│ └── docker-compose.yaml
├── .dockerignore
├── .editorconfig
├── .gitattributes
├── .github/
│ ├── CODEOWNERS
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug-report.yml
│ │ ├── config.yml
│ │ ├── feature-request.yml
│ │ ├── privileged.yml
│ │ └── task.yml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── actions/
│ │ └── uv_setup/
│ │ └── action.yml
│ ├── dependabot.yml
│ ├── scripts/
│ │ ├── check_diff.py
│ │ ├── check_prerelease_dependencies.py
│ │ ├── get_min_versions.py
│ │ ├── pr-labeler-config.json
│ │ └── pr-labeler.js
│ ├── tools/
│ │ └── git-restore-mtime
│ └── workflows/
│ ├── _compile_integration_test.yml
│ ├── _lint.yml
│ ├── _refresh_model_profiles.yml
│ ├── _release.yml
│ ├── _test.yml
│ ├── _test_pydantic.yml
│ ├── auto-label-by-package.yml
│ ├── check_agents_sync.yml
│ ├── check_core_versions.yml
│ ├── check_diffs.yml
│ ├── close_unchecked_issues.yml
│ ├── codspeed.yml
│ ├── integration_tests.yml
│ ├── pr_labeler.yml
│ ├── pr_labeler_backfill.yml
│ ├── pr_lint.yml
│ ├── refresh_model_profiles.yml
│ ├── reopen_on_assignment.yml
│ ├── require_issue_link.yml
│ ├── tag-external-issues.yml
│ └── v03_api_doc_build.yml
├── .gitignore
├── .markdownlint.json
├── .mcp.json
├── .pre-commit-config.yaml
├── .vscode/
│ ├── extensions.json
│ └── settings.json
├── AGENTS.md
├── CITATION.cff
├── CLAUDE.md
├── LICENSE
├── README.md
└── libs/
├── Makefile
├── README.md
├── core/
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain_core/
│ │ ├── __init__.py
│ │ ├── _api/
│ │ │ ├── __init__.py
│ │ │ ├── beta_decorator.py
│ │ │ ├── deprecation.py
│ │ │ ├── internal.py
│ │ │ └── path.py
│ │ ├── _import_utils.py
│ │ ├── _security/
│ │ │ ├── __init__.py
│ │ │ └── _ssrf_protection.py
│ │ ├── agents.py
│ │ ├── caches.py
│ │ ├── callbacks/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── file.py
│ │ │ ├── manager.py
│ │ │ ├── stdout.py
│ │ │ ├── streaming_stdout.py
│ │ │ └── usage.py
│ │ ├── chat_history.py
│ │ ├── chat_loaders.py
│ │ ├── chat_sessions.py
│ │ ├── cross_encoders.py
│ │ ├── document_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── blob_loaders.py
│ │ │ └── langsmith.py
│ │ ├── documents/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── compressor.py
│ │ │ └── transformers.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings.py
│ │ │ └── fake.py
│ │ ├── env.py
│ │ ├── example_selectors/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── length_based.py
│ │ │ └── semantic_similarity.py
│ │ ├── exceptions.py
│ │ ├── globals.py
│ │ ├── indexing/
│ │ │ ├── __init__.py
│ │ │ ├── api.py
│ │ │ ├── base.py
│ │ │ └── in_memory.py
│ │ ├── language_models/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── base.py
│ │ │ ├── chat_models.py
│ │ │ ├── fake.py
│ │ │ ├── fake_chat_models.py
│ │ │ ├── llms.py
│ │ │ └── model_profile.py
│ │ ├── load/
│ │ │ ├── __init__.py
│ │ │ ├── _validation.py
│ │ │ ├── dump.py
│ │ │ ├── load.py
│ │ │ ├── mapping.py
│ │ │ └── serializable.py
│ │ ├── messages/
│ │ │ ├── __init__.py
│ │ │ ├── ai.py
│ │ │ ├── base.py
│ │ │ ├── block_translators/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── anthropic.py
│ │ │ │ ├── bedrock.py
│ │ │ │ ├── bedrock_converse.py
│ │ │ │ ├── google_genai.py
│ │ │ │ ├── google_vertexai.py
│ │ │ │ ├── groq.py
│ │ │ │ ├── langchain_v0.py
│ │ │ │ └── openai.py
│ │ │ ├── chat.py
│ │ │ ├── content.py
│ │ │ ├── function.py
│ │ │ ├── human.py
│ │ │ ├── modifier.py
│ │ │ ├── system.py
│ │ │ ├── tool.py
│ │ │ └── utils.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── format_instructions.py
│ │ │ ├── json.py
│ │ │ ├── list.py
│ │ │ ├── openai_functions.py
│ │ │ ├── openai_tools.py
│ │ │ ├── pydantic.py
│ │ │ ├── string.py
│ │ │ ├── transform.py
│ │ │ └── xml.py
│ │ ├── outputs/
│ │ │ ├── __init__.py
│ │ │ ├── chat_generation.py
│ │ │ ├── chat_result.py
│ │ │ ├── generation.py
│ │ │ ├── llm_result.py
│ │ │ └── run_info.py
│ │ ├── prompt_values.py
│ │ ├── prompts/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── dict.py
│ │ │ ├── few_shot.py
│ │ │ ├── few_shot_with_templates.py
│ │ │ ├── image.py
│ │ │ ├── loading.py
│ │ │ ├── message.py
│ │ │ ├── prompt.py
│ │ │ ├── string.py
│ │ │ └── structured.py
│ │ ├── py.typed
│ │ ├── rate_limiters.py
│ │ ├── retrievers.py
│ │ ├── runnables/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── branch.py
│ │ │ ├── config.py
│ │ │ ├── configurable.py
│ │ │ ├── fallbacks.py
│ │ │ ├── graph.py
│ │ │ ├── graph_ascii.py
│ │ │ ├── graph_mermaid.py
│ │ │ ├── graph_png.py
│ │ │ ├── history.py
│ │ │ ├── passthrough.py
│ │ │ ├── retry.py
│ │ │ ├── router.py
│ │ │ ├── schema.py
│ │ │ └── utils.py
│ │ ├── stores.py
│ │ ├── structured_query.py
│ │ ├── sys_info.py
│ │ ├── tools/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── convert.py
│ │ │ ├── render.py
│ │ │ ├── retriever.py
│ │ │ ├── simple.py
│ │ │ └── structured.py
│ │ ├── tracers/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── _streaming.py
│ │ │ ├── base.py
│ │ │ ├── context.py
│ │ │ ├── core.py
│ │ │ ├── evaluation.py
│ │ │ ├── event_stream.py
│ │ │ ├── langchain.py
│ │ │ ├── log_stream.py
│ │ │ ├── memory_stream.py
│ │ │ ├── root_listeners.py
│ │ │ ├── run_collector.py
│ │ │ ├── schemas.py
│ │ │ └── stdout.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── _merge.py
│ │ │ ├── aiter.py
│ │ │ ├── env.py
│ │ │ ├── formatting.py
│ │ │ ├── function_calling.py
│ │ │ ├── html.py
│ │ │ ├── image.py
│ │ │ ├── input.py
│ │ │ ├── interactive_env.py
│ │ │ ├── iter.py
│ │ │ ├── json.py
│ │ │ ├── json_schema.py
│ │ │ ├── mustache.py
│ │ │ ├── pydantic.py
│ │ │ ├── strings.py
│ │ │ ├── usage.py
│ │ │ ├── utils.py
│ │ │ └── uuid.py
│ │ ├── vectorstores/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── in_memory.py
│ │ │ └── utils.py
│ │ └── version.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ ├── check_version.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── benchmarks/
│ │ ├── __init__.py
│ │ ├── test_async_callbacks.py
│ │ └── test_imports.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── _api/
│ │ ├── __init__.py
│ │ ├── test_beta_decorator.py
│ │ ├── test_deprecation.py
│ │ ├── test_imports.py
│ │ └── test_path.py
│ ├── caches/
│ │ ├── __init__.py
│ │ └── test_in_memory_cache.py
│ ├── callbacks/
│ │ ├── __init__.py
│ │ ├── test_async_callback_manager.py
│ │ ├── test_dispatch_custom_event.py
│ │ ├── test_handle_event.py
│ │ ├── test_imports.py
│ │ ├── test_sync_callback_manager.py
│ │ └── test_usage_callback.py
│ ├── chat_history/
│ │ ├── __init__.py
│ │ └── test_chat_history.py
│ ├── conftest.py
│ ├── data/
│ │ ├── prompt_file.txt
│ │ └── prompts/
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ └── simple_prompt.json
│ ├── dependencies/
│ │ ├── __init__.py
│ │ └── test_dependencies.py
│ ├── document_loaders/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_langsmith.py
│ ├── documents/
│ │ ├── __init__.py
│ │ ├── test_document.py
│ │ ├── test_imports.py
│ │ └── test_str.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ └── test_deterministic_embedding.py
│ ├── example_selectors/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_imports.py
│ │ ├── test_length_based_example_selector.py
│ │ └── test_similarity.py
│ ├── examples/
│ │ ├── example-non-utf8.csv
│ │ ├── example-non-utf8.txt
│ │ ├── example-utf8.csv
│ │ ├── example-utf8.txt
│ │ ├── example_prompt.json
│ │ ├── examples.json
│ │ ├── examples.yaml
│ │ ├── few_shot_prompt.json
│ │ ├── few_shot_prompt.yaml
│ │ ├── few_shot_prompt_example_prompt.json
│ │ ├── few_shot_prompt_examples_in.json
│ │ ├── few_shot_prompt_yaml_examples.yaml
│ │ ├── jinja_injection_prompt.json
│ │ ├── jinja_injection_prompt.yaml
│ │ ├── prompt_with_output_parser.json
│ │ ├── simple_prompt.json
│ │ ├── simple_prompt.yaml
│ │ ├── simple_prompt_with_template_file.json
│ │ └── simple_template.txt
│ ├── fake/
│ │ ├── __init__.py
│ │ ├── callbacks.py
│ │ └── test_fake_chat_model.py
│ ├── indexing/
│ │ ├── __init__.py
│ │ ├── test_hashed_document.py
│ │ ├── test_in_memory_indexer.py
│ │ ├── test_in_memory_record_manager.py
│ │ ├── test_indexing.py
│ │ └── test_public_api.py
│ ├── language_models/
│ │ ├── __init__.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_benchmark.py
│ │ │ ├── test_cache.py
│ │ │ └── test_rate_limiting.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ └── test_cache.py
│ │ ├── test_imports.py
│ │ └── test_model_profile.py
│ ├── load/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_secret_injection.py
│ │ └── test_serializable.py
│ ├── messages/
│ │ ├── __init__.py
│ │ ├── block_translators/
│ │ │ ├── __init__.py
│ │ │ ├── test_anthropic.py
│ │ │ ├── test_bedrock.py
│ │ │ ├── test_bedrock_converse.py
│ │ │ ├── test_google_genai.py
│ │ │ ├── test_groq.py
│ │ │ ├── test_langchain_v0.py
│ │ │ ├── test_openai.py
│ │ │ └── test_registration.py
│ │ ├── test_ai.py
│ │ ├── test_imports.py
│ │ └── test_utils.py
│ ├── output_parsers/
│ │ ├── __init__.py
│ │ ├── test_base_parsers.py
│ │ ├── test_imports.py
│ │ ├── test_json.py
│ │ ├── test_list_parser.py
│ │ ├── test_openai_functions.py
│ │ ├── test_openai_tools.py
│ │ ├── test_pydantic_parser.py
│ │ └── test_xml_parser.py
│ ├── outputs/
│ │ ├── __init__.py
│ │ ├── test_chat_generation.py
│ │ └── test_imports.py
│ ├── prompt_file.txt
│ ├── prompts/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_chat.ambr
│ │ │ └── test_prompt.ambr
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ ├── simple_prompt.json
│ │ ├── test_chat.py
│ │ ├── test_dict.py
│ │ ├── test_few_shot.py
│ │ ├── test_few_shot_with_templates.py
│ │ ├── test_image.py
│ │ ├── test_imports.py
│ │ ├── test_loading.py
│ │ ├── test_prompt.py
│ │ ├── test_string.py
│ │ ├── test_structured.py
│ │ └── test_utils.py
│ ├── pydantic_utils.py
│ ├── rate_limiters/
│ │ ├── __init__.py
│ │ └── test_in_memory_rate_limiter.py
│ ├── runnables/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_fallbacks.ambr
│ │ │ ├── test_graph.ambr
│ │ │ └── test_runnable.ambr
│ │ ├── test_concurrency.py
│ │ ├── test_config.py
│ │ ├── test_configurable.py
│ │ ├── test_fallbacks.py
│ │ ├── test_graph.py
│ │ ├── test_history.py
│ │ ├── test_imports.py
│ │ ├── test_runnable.py
│ │ ├── test_runnable_events_v1.py
│ │ ├── test_runnable_events_v2.py
│ │ ├── test_tracing_interops.py
│ │ └── test_utils.py
│ ├── stores/
│ │ ├── __init__.py
│ │ └── test_in_memory.py
│ ├── stubs.py
│ ├── test_globals.py
│ ├── test_imports.py
│ ├── test_messages.py
│ ├── test_outputs.py
│ ├── test_prompt_values.py
│ ├── test_pydantic_imports.py
│ ├── test_pydantic_serde.py
│ ├── test_retrievers.py
│ ├── test_setup.py
│ ├── test_ssrf_protection.py
│ ├── test_sys_info.py
│ ├── test_tools.py
│ ├── tracers/
│ │ ├── __init__.py
│ │ ├── test_async_base_tracer.py
│ │ ├── test_automatic_metadata.py
│ │ ├── test_base_tracer.py
│ │ ├── test_imports.py
│ │ ├── test_langchain.py
│ │ ├── test_memory_stream.py
│ │ ├── test_run_collector.py
│ │ └── test_schemas.py
│ ├── utils/
│ │ ├── __init__.py
│ │ ├── test_aiter.py
│ │ ├── test_env.py
│ │ ├── test_formatting.py
│ │ ├── test_function_calling.py
│ │ ├── test_html.py
│ │ ├── test_imports.py
│ │ ├── test_iter.py
│ │ ├── test_json_schema.py
│ │ ├── test_pydantic.py
│ │ ├── test_rm_titles.py
│ │ ├── test_strings.py
│ │ ├── test_usage.py
│ │ ├── test_utils.py
│ │ └── test_uuid_utils.py
│ └── vectorstores/
│ ├── __init__.py
│ ├── test_in_memory.py
│ ├── test_utils.py
│ └── test_vectorstore.py
├── langchain/
│ ├── .dockerignore
│ ├── .flake8
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── dev.Dockerfile
│ ├── extended_testing_deps.txt
│ ├── langchain_classic/
│ │ ├── __init__.py
│ │ ├── _api/
│ │ │ ├── __init__.py
│ │ │ ├── deprecation.py
│ │ │ ├── interactive_env.py
│ │ │ ├── module_import.py
│ │ │ └── path.py
│ │ ├── adapters/
│ │ │ ├── __init__.py
│ │ │ └── openai.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ ├── agent.py
│ │ │ ├── agent_iterator.py
│ │ │ ├── agent_toolkits/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ainetwork/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── amadeus/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── azure_cognitive_services.py
│ │ │ │ ├── base.py
│ │ │ │ ├── clickup/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── conversational_retrieval/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── openai_functions.py
│ │ │ │ │ └── tool.py
│ │ │ │ ├── csv/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── file_management/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── github/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── gitlab/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── gmail/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── jira/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── json/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── multion/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── nasa/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── nla/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── tool.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── office365/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── openapi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── planner.py
│ │ │ │ │ ├── planner_prompt.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ ├── spec.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── pandas/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── playwright/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── powerbi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── chat_base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── python/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── slack/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── spark/
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── spark_sql/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── sql/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── steam/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── vectorstore/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── base.py
│ │ │ │ │ ├── prompt.py
│ │ │ │ │ └── toolkit.py
│ │ │ │ ├── xorbits/
│ │ │ │ │ └── __init__.py
│ │ │ │ └── zapier/
│ │ │ │ ├── __init__.py
│ │ │ │ └── toolkit.py
│ │ │ ├── agent_types.py
│ │ │ ├── chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── format_scratchpad/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── log.py
│ │ │ │ ├── log_to_messages.py
│ │ │ │ ├── openai_functions.py
│ │ │ │ ├── openai_tools.py
│ │ │ │ ├── tools.py
│ │ │ │ └── xml.py
│ │ │ ├── initialize.py
│ │ │ ├── json_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── load_tools.py
│ │ │ ├── loading.py
│ │ │ ├── mrkl/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── openai_assistant/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── openai_functions_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── agent_token_buffer_memory.py
│ │ │ │ └── base.py
│ │ │ ├── openai_functions_multi_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── openai_tools/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── output_parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── json.py
│ │ │ │ ├── openai_functions.py
│ │ │ │ ├── openai_tools.py
│ │ │ │ ├── react_json_single_input.py
│ │ │ │ ├── react_single_input.py
│ │ │ │ ├── self_ask.py
│ │ │ │ ├── tools.py
│ │ │ │ └── xml.py
│ │ │ ├── react/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── agent.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ ├── textworld_prompt.py
│ │ │ │ └── wiki_prompt.py
│ │ │ ├── schema.py
│ │ │ ├── self_ask_with_search/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── structured_chat/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── output_parser.py
│ │ │ │ └── prompt.py
│ │ │ ├── tool_calling_agent/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── tools.py
│ │ │ ├── types.py
│ │ │ ├── utils.py
│ │ │ └── xml/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ └── prompt.py
│ │ ├── base_language.py
│ │ ├── base_memory.py
│ │ ├── cache.py
│ │ ├── callbacks/
│ │ │ ├── __init__.py
│ │ │ ├── aim_callback.py
│ │ │ ├── argilla_callback.py
│ │ │ ├── arize_callback.py
│ │ │ ├── arthur_callback.py
│ │ │ ├── base.py
│ │ │ ├── clearml_callback.py
│ │ │ ├── comet_ml_callback.py
│ │ │ ├── confident_callback.py
│ │ │ ├── context_callback.py
│ │ │ ├── file.py
│ │ │ ├── flyte_callback.py
│ │ │ ├── human.py
│ │ │ ├── infino_callback.py
│ │ │ ├── labelstudio_callback.py
│ │ │ ├── llmonitor_callback.py
│ │ │ ├── manager.py
│ │ │ ├── mlflow_callback.py
│ │ │ ├── openai_info.py
│ │ │ ├── promptlayer_callback.py
│ │ │ ├── sagemaker_callback.py
│ │ │ ├── stdout.py
│ │ │ ├── streaming_aiter.py
│ │ │ ├── streaming_aiter_final_only.py
│ │ │ ├── streaming_stdout.py
│ │ │ ├── streaming_stdout_final_only.py
│ │ │ ├── streamlit/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── mutable_expander.py
│ │ │ │ └── streamlit_callback_handler.py
│ │ │ ├── tracers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── comet.py
│ │ │ │ ├── evaluation.py
│ │ │ │ ├── langchain.py
│ │ │ │ ├── log_stream.py
│ │ │ │ ├── logging.py
│ │ │ │ ├── root_listeners.py
│ │ │ │ ├── run_collector.py
│ │ │ │ ├── schemas.py
│ │ │ │ ├── stdout.py
│ │ │ │ └── wandb.py
│ │ │ ├── trubrics_callback.py
│ │ │ ├── utils.py
│ │ │ ├── wandb_callback.py
│ │ │ └── whylabs_callback.py
│ │ ├── chains/
│ │ │ ├── __init__.py
│ │ │ ├── api/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── news_docs.py
│ │ │ │ ├── open_meteo_docs.py
│ │ │ │ ├── openapi/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── chain.py
│ │ │ │ │ ├── prompts.py
│ │ │ │ │ ├── requests_chain.py
│ │ │ │ │ └── response_chain.py
│ │ │ │ ├── podcast_docs.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── tmdb_docs.py
│ │ │ ├── base.py
│ │ │ ├── chat_vector_db/
│ │ │ │ ├── __init__.py
│ │ │ │ └── prompts.py
│ │ │ ├── combine_documents/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── map_reduce.py
│ │ │ │ ├── map_rerank.py
│ │ │ │ ├── reduce.py
│ │ │ │ ├── refine.py
│ │ │ │ └── stuff.py
│ │ │ ├── constitutional_ai/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── models.py
│ │ │ │ ├── principles.py
│ │ │ │ └── prompts.py
│ │ │ ├── conversation/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── memory.py
│ │ │ │ └── prompt.py
│ │ │ ├── conversational_retrieval/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── elasticsearch_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── ernie_functions/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── example_generator.py
│ │ │ ├── flare/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── graph_qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── arangodb.py
│ │ │ │ ├── base.py
│ │ │ │ ├── cypher.py
│ │ │ │ ├── cypher_utils.py
│ │ │ │ ├── falkordb.py
│ │ │ │ ├── gremlin.py
│ │ │ │ ├── hugegraph.py
│ │ │ │ ├── kuzu.py
│ │ │ │ ├── nebulagraph.py
│ │ │ │ ├── neptune_cypher.py
│ │ │ │ ├── neptune_sparql.py
│ │ │ │ ├── ontotext_graphdb.py
│ │ │ │ ├── prompts.py
│ │ │ │ └── sparql.py
│ │ │ ├── history_aware_retriever.py
│ │ │ ├── hyde/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts.py
│ │ │ ├── llm.py
│ │ │ ├── llm_bash/
│ │ │ │ └── __init__.py
│ │ │ ├── llm_checker/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── llm_math/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── llm_requests.py
│ │ │ ├── llm_summarization_checker/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompts/
│ │ │ │ ├── are_all_true_prompt.txt
│ │ │ │ ├── check_facts.txt
│ │ │ │ ├── create_facts.txt
│ │ │ │ └── revise_summary.txt
│ │ │ ├── llm_symbolic_math/
│ │ │ │ └── __init__.py
│ │ │ ├── loading.py
│ │ │ ├── mapreduce.py
│ │ │ ├── moderation.py
│ │ │ ├── natbot/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── crawler.py
│ │ │ │ └── prompt.py
│ │ │ ├── openai_functions/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── citation_fuzzy_match.py
│ │ │ │ ├── extraction.py
│ │ │ │ ├── openapi.py
│ │ │ │ ├── qa_with_structure.py
│ │ │ │ ├── tagging.py
│ │ │ │ └── utils.py
│ │ │ ├── openai_tools/
│ │ │ │ ├── __init__.py
│ │ │ │ └── extraction.py
│ │ │ ├── prompt_selector.py
│ │ │ ├── qa_generation/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── qa_with_sources/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── loading.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ ├── retrieval.py
│ │ │ │ ├── stuff_prompt.py
│ │ │ │ └── vector_db.py
│ │ │ ├── query_constructor/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── ir.py
│ │ │ │ ├── parser.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── schema.py
│ │ │ ├── question_answering/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chain.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── map_rerank_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ └── stuff_prompt.py
│ │ │ ├── retrieval.py
│ │ │ ├── retrieval_qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ └── prompt.py
│ │ │ ├── router/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── embedding_router.py
│ │ │ │ ├── llm_router.py
│ │ │ │ ├── multi_prompt.py
│ │ │ │ ├── multi_prompt_prompt.py
│ │ │ │ ├── multi_retrieval_prompt.py
│ │ │ │ └── multi_retrieval_qa.py
│ │ │ ├── sequential.py
│ │ │ ├── sql_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── query.py
│ │ │ ├── structured_output/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── summarize/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── chain.py
│ │ │ │ ├── map_reduce_prompt.py
│ │ │ │ ├── refine_prompts.py
│ │ │ │ └── stuff_prompt.py
│ │ │ └── transform.py
│ │ ├── chat_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── facebook_messenger.py
│ │ │ ├── gmail.py
│ │ │ ├── imessage.py
│ │ │ ├── langsmith.py
│ │ │ ├── slack.py
│ │ │ ├── telegram.py
│ │ │ ├── utils.py
│ │ │ └── whatsapp.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── anthropic.py
│ │ │ ├── anyscale.py
│ │ │ ├── azure_openai.py
│ │ │ ├── azureml_endpoint.py
│ │ │ ├── baichuan.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── base.py
│ │ │ ├── bedrock.py
│ │ │ ├── cohere.py
│ │ │ ├── databricks.py
│ │ │ ├── ernie.py
│ │ │ ├── everlyai.py
│ │ │ ├── fake.py
│ │ │ ├── fireworks.py
│ │ │ ├── gigachat.py
│ │ │ ├── google_palm.py
│ │ │ ├── human.py
│ │ │ ├── hunyuan.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── jinachat.py
│ │ │ ├── konko.py
│ │ │ ├── litellm.py
│ │ │ ├── meta.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_ai_gateway.py
│ │ │ ├── ollama.py
│ │ │ ├── openai.py
│ │ │ ├── pai_eas_endpoint.py
│ │ │ ├── promptlayer_openai.py
│ │ │ ├── tongyi.py
│ │ │ ├── vertexai.py
│ │ │ ├── volcengine_maas.py
│ │ │ └── yandex.py
│ │ ├── docstore/
│ │ │ ├── __init__.py
│ │ │ ├── arbitrary_fn.py
│ │ │ ├── base.py
│ │ │ ├── document.py
│ │ │ ├── in_memory.py
│ │ │ └── wikipedia.py
│ │ ├── document_loaders/
│ │ │ ├── __init__.py
│ │ │ ├── acreom.py
│ │ │ ├── airbyte.py
│ │ │ ├── airbyte_json.py
│ │ │ ├── airtable.py
│ │ │ ├── apify_dataset.py
│ │ │ ├── arcgis_loader.py
│ │ │ ├── arxiv.py
│ │ │ ├── assemblyai.py
│ │ │ ├── async_html.py
│ │ │ ├── azlyrics.py
│ │ │ ├── azure_ai_data.py
│ │ │ ├── azure_blob_storage_container.py
│ │ │ ├── azure_blob_storage_file.py
│ │ │ ├── baiducloud_bos_directory.py
│ │ │ ├── baiducloud_bos_file.py
│ │ │ ├── base.py
│ │ │ ├── base_o365.py
│ │ │ ├── bibtex.py
│ │ │ ├── bigquery.py
│ │ │ ├── bilibili.py
│ │ │ ├── blackboard.py
│ │ │ ├── blob_loaders/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── file_system.py
│ │ │ │ ├── schema.py
│ │ │ │ └── youtube_audio.py
│ │ │ ├── blockchain.py
│ │ │ ├── brave_search.py
│ │ │ ├── browserless.py
│ │ │ ├── chatgpt.py
│ │ │ ├── chromium.py
│ │ │ ├── college_confidential.py
│ │ │ ├── concurrent.py
│ │ │ ├── confluence.py
│ │ │ ├── conllu.py
│ │ │ ├── couchbase.py
│ │ │ ├── csv_loader.py
│ │ │ ├── cube_semantic.py
│ │ │ ├── datadog_logs.py
│ │ │ ├── dataframe.py
│ │ │ ├── diffbot.py
│ │ │ ├── directory.py
│ │ │ ├── discord.py
│ │ │ ├── docugami.py
│ │ │ ├── docusaurus.py
│ │ │ ├── dropbox.py
│ │ │ ├── duckdb_loader.py
│ │ │ ├── email.py
│ │ │ ├── epub.py
│ │ │ ├── etherscan.py
│ │ │ ├── evernote.py
│ │ │ ├── excel.py
│ │ │ ├── facebook_chat.py
│ │ │ ├── fauna.py
│ │ │ ├── figma.py
│ │ │ ├── gcs_directory.py
│ │ │ ├── gcs_file.py
│ │ │ ├── generic.py
│ │ │ ├── geodataframe.py
│ │ │ ├── git.py
│ │ │ ├── gitbook.py
│ │ │ ├── github.py
│ │ │ ├── google_speech_to_text.py
│ │ │ ├── googledrive.py
│ │ │ ├── gutenberg.py
│ │ │ ├── helpers.py
│ │ │ ├── hn.py
│ │ │ ├── html.py
│ │ │ ├── html_bs.py
│ │ │ ├── hugging_face_dataset.py
│ │ │ ├── ifixit.py
│ │ │ ├── image.py
│ │ │ ├── image_captions.py
│ │ │ ├── imsdb.py
│ │ │ ├── iugu.py
│ │ │ ├── joplin.py
│ │ │ ├── json_loader.py
│ │ │ ├── lakefs.py
│ │ │ ├── larksuite.py
│ │ │ ├── markdown.py
│ │ │ ├── mastodon.py
│ │ │ ├── max_compute.py
│ │ │ ├── mediawikidump.py
│ │ │ ├── merge.py
│ │ │ ├── mhtml.py
│ │ │ ├── modern_treasury.py
│ │ │ ├── mongodb.py
│ │ │ ├── news.py
│ │ │ ├── notebook.py
│ │ │ ├── notion.py
│ │ │ ├── notiondb.py
│ │ │ ├── nuclia.py
│ │ │ ├── obs_directory.py
│ │ │ ├── obs_file.py
│ │ │ ├── obsidian.py
│ │ │ ├── odt.py
│ │ │ ├── onedrive.py
│ │ │ ├── onedrive_file.py
│ │ │ ├── onenote.py
│ │ │ ├── open_city_data.py
│ │ │ ├── org_mode.py
│ │ │ ├── parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── audio.py
│ │ │ │ ├── docai.py
│ │ │ │ ├── generic.py
│ │ │ │ ├── grobid.py
│ │ │ │ ├── html/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── bs4.py
│ │ │ │ ├── language/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── cobol.py
│ │ │ │ │ ├── code_segmenter.py
│ │ │ │ │ ├── javascript.py
│ │ │ │ │ ├── language_parser.py
│ │ │ │ │ └── python.py
│ │ │ │ ├── msword.py
│ │ │ │ ├── pdf.py
│ │ │ │ ├── registry.py
│ │ │ │ └── txt.py
│ │ │ ├── pdf.py
│ │ │ ├── polars_dataframe.py
│ │ │ ├── powerpoint.py
│ │ │ ├── psychic.py
│ │ │ ├── pubmed.py
│ │ │ ├── pyspark_dataframe.py
│ │ │ ├── python.py
│ │ │ ├── quip.py
│ │ │ ├── readthedocs.py
│ │ │ ├── recursive_url_loader.py
│ │ │ ├── reddit.py
│ │ │ ├── roam.py
│ │ │ ├── rocksetdb.py
│ │ │ ├── rspace.py
│ │ │ ├── rss.py
│ │ │ ├── rst.py
│ │ │ ├── rtf.py
│ │ │ ├── s3_directory.py
│ │ │ ├── s3_file.py
│ │ │ ├── sharepoint.py
│ │ │ ├── sitemap.py
│ │ │ ├── slack_directory.py
│ │ │ ├── snowflake_loader.py
│ │ │ ├── spreedly.py
│ │ │ ├── srt.py
│ │ │ ├── stripe.py
│ │ │ ├── telegram.py
│ │ │ ├── tencent_cos_directory.py
│ │ │ ├── tencent_cos_file.py
│ │ │ ├── tensorflow_datasets.py
│ │ │ ├── text.py
│ │ │ ├── tomarkdown.py
│ │ │ ├── toml.py
│ │ │ ├── trello.py
│ │ │ ├── tsv.py
│ │ │ ├── twitter.py
│ │ │ ├── unstructured.py
│ │ │ ├── url.py
│ │ │ ├── url_playwright.py
│ │ │ ├── url_selenium.py
│ │ │ ├── weather.py
│ │ │ ├── web_base.py
│ │ │ ├── whatsapp_chat.py
│ │ │ ├── wikipedia.py
│ │ │ ├── word_document.py
│ │ │ ├── xml.py
│ │ │ ├── xorbits.py
│ │ │ └── youtube.py
│ │ ├── document_transformers/
│ │ │ ├── __init__.py
│ │ │ ├── beautiful_soup_transformer.py
│ │ │ ├── doctran_text_extract.py
│ │ │ ├── doctran_text_qa.py
│ │ │ ├── doctran_text_translate.py
│ │ │ ├── embeddings_redundant_filter.py
│ │ │ ├── google_translate.py
│ │ │ ├── html2text.py
│ │ │ ├── long_context_reorder.py
│ │ │ ├── nuclia_text_transform.py
│ │ │ ├── openai_functions.py
│ │ │ └── xsl/
│ │ │ └── html_chunks_with_headers.xslt
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── aleph_alpha.py
│ │ │ ├── awa.py
│ │ │ ├── azure_openai.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── base.py
│ │ │ ├── bedrock.py
│ │ │ ├── bookend.py
│ │ │ ├── cache.py
│ │ │ ├── clarifai.py
│ │ │ ├── cloudflare_workersai.py
│ │ │ ├── cohere.py
│ │ │ ├── dashscope.py
│ │ │ ├── databricks.py
│ │ │ ├── deepinfra.py
│ │ │ ├── edenai.py
│ │ │ ├── elasticsearch.py
│ │ │ ├── embaas.py
│ │ │ ├── ernie.py
│ │ │ ├── fake.py
│ │ │ ├── fastembed.py
│ │ │ ├── google_palm.py
│ │ │ ├── gpt4all.py
│ │ │ ├── gradient_ai.py
│ │ │ ├── huggingface.py
│ │ │ ├── huggingface_hub.py
│ │ │ ├── infinity.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── jina.py
│ │ │ ├── johnsnowlabs.py
│ │ │ ├── llamacpp.py
│ │ │ ├── llm_rails.py
│ │ │ ├── localai.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_gateway.py
│ │ │ ├── modelscope_hub.py
│ │ │ ├── mosaicml.py
│ │ │ ├── nlpcloud.py
│ │ │ ├── octoai_embeddings.py
│ │ │ ├── ollama.py
│ │ │ ├── openai.py
│ │ │ ├── sagemaker_endpoint.py
│ │ │ ├── self_hosted.py
│ │ │ ├── self_hosted_hugging_face.py
│ │ │ ├── sentence_transformer.py
│ │ │ ├── spacy_embeddings.py
│ │ │ ├── tensorflow_hub.py
│ │ │ ├── vertexai.py
│ │ │ ├── voyageai.py
│ │ │ └── xinference.py
│ │ ├── env.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── agents/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── trajectory_eval_chain.py
│ │ │ │ └── trajectory_eval_prompt.py
│ │ │ ├── comparison/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ ├── criteria/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ ├── embedding_distance/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── exact_match/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── loading.py
│ │ │ ├── parsing/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── json_distance.py
│ │ │ │ └── json_schema.py
│ │ │ ├── qa/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ ├── eval_prompt.py
│ │ │ │ ├── generate_chain.py
│ │ │ │ └── generate_prompt.py
│ │ │ ├── regex_match/
│ │ │ │ ├── __init__.py
│ │ │ │ └── base.py
│ │ │ ├── schema.py
│ │ │ ├── scoring/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── eval_chain.py
│ │ │ │ └── prompt.py
│ │ │ └── string_distance/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── example_generator.py
│ │ ├── formatting.py
│ │ ├── globals.py
│ │ ├── graphs/
│ │ │ ├── __init__.py
│ │ │ ├── arangodb_graph.py
│ │ │ ├── falkordb_graph.py
│ │ │ ├── graph_document.py
│ │ │ ├── graph_store.py
│ │ │ ├── hugegraph.py
│ │ │ ├── kuzu_graph.py
│ │ │ ├── memgraph_graph.py
│ │ │ ├── nebula_graph.py
│ │ │ ├── neo4j_graph.py
│ │ │ ├── neptune_graph.py
│ │ │ ├── networkx_graph.py
│ │ │ └── rdf_graph.py
│ │ ├── hub.py
│ │ ├── indexes/
│ │ │ ├── __init__.py
│ │ │ ├── _api.py
│ │ │ ├── _sql_record_manager.py
│ │ │ ├── graph.py
│ │ │ ├── prompts/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── entity_extraction.py
│ │ │ │ ├── entity_summarization.py
│ │ │ │ └── knowledge_triplet_extraction.py
│ │ │ └── vectorstore.py
│ │ ├── input.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── ai21.py
│ │ │ ├── aleph_alpha.py
│ │ │ ├── amazon_api_gateway.py
│ │ │ ├── anthropic.py
│ │ │ ├── anyscale.py
│ │ │ ├── arcee.py
│ │ │ ├── aviary.py
│ │ │ ├── azureml_endpoint.py
│ │ │ ├── baidu_qianfan_endpoint.py
│ │ │ ├── bananadev.py
│ │ │ ├── base.py
│ │ │ ├── baseten.py
│ │ │ ├── beam.py
│ │ │ ├── bedrock.py
│ │ │ ├── bittensor.py
│ │ │ ├── cerebriumai.py
│ │ │ ├── chatglm.py
│ │ │ ├── clarifai.py
│ │ │ ├── cloudflare_workersai.py
│ │ │ ├── cohere.py
│ │ │ ├── ctransformers.py
│ │ │ ├── ctranslate2.py
│ │ │ ├── databricks.py
│ │ │ ├── deepinfra.py
│ │ │ ├── deepsparse.py
│ │ │ ├── edenai.py
│ │ │ ├── fake.py
│ │ │ ├── fireworks.py
│ │ │ ├── forefrontai.py
│ │ │ ├── gigachat.py
│ │ │ ├── google_palm.py
│ │ │ ├── gooseai.py
│ │ │ ├── gpt4all.py
│ │ │ ├── gradient_ai.py
│ │ │ ├── grammars/
│ │ │ │ ├── json.gbnf
│ │ │ │ └── list.gbnf
│ │ │ ├── huggingface_endpoint.py
│ │ │ ├── huggingface_hub.py
│ │ │ ├── huggingface_pipeline.py
│ │ │ ├── huggingface_text_gen_inference.py
│ │ │ ├── human.py
│ │ │ ├── javelin_ai_gateway.py
│ │ │ ├── koboldai.py
│ │ │ ├── llamacpp.py
│ │ │ ├── loading.py
│ │ │ ├── manifest.py
│ │ │ ├── minimax.py
│ │ │ ├── mlflow.py
│ │ │ ├── mlflow_ai_gateway.py
│ │ │ ├── modal.py
│ │ │ ├── mosaicml.py
│ │ │ ├── nlpcloud.py
│ │ │ ├── octoai_endpoint.py
│ │ │ ├── ollama.py
│ │ │ ├── opaqueprompts.py
│ │ │ ├── openai.py
│ │ │ ├── openllm.py
│ │ │ ├── openlm.py
│ │ │ ├── pai_eas_endpoint.py
│ │ │ ├── petals.py
│ │ │ ├── pipelineai.py
│ │ │ ├── predibase.py
│ │ │ ├── predictionguard.py
│ │ │ ├── promptlayer_openai.py
│ │ │ ├── replicate.py
│ │ │ ├── rwkv.py
│ │ │ ├── sagemaker_endpoint.py
│ │ │ ├── self_hosted.py
│ │ │ ├── self_hosted_hugging_face.py
│ │ │ ├── stochasticai.py
│ │ │ ├── symblai_nebula.py
│ │ │ ├── textgen.py
│ │ │ ├── titan_takeoff.py
│ │ │ ├── titan_takeoff_pro.py
│ │ │ ├── together.py
│ │ │ ├── tongyi.py
│ │ │ ├── utils.py
│ │ │ ├── vertexai.py
│ │ │ ├── vllm.py
│ │ │ ├── volcengine_maas.py
│ │ │ ├── watsonxllm.py
│ │ │ ├── writer.py
│ │ │ ├── xinference.py
│ │ │ └── yandex.py
│ │ ├── load/
│ │ │ ├── __init__.py
│ │ │ ├── dump.py
│ │ │ ├── load.py
│ │ │ └── serializable.py
│ │ ├── memory/
│ │ │ ├── __init__.py
│ │ │ ├── buffer.py
│ │ │ ├── buffer_window.py
│ │ │ ├── chat_memory.py
│ │ │ ├── chat_message_histories/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── astradb.py
│ │ │ │ ├── cassandra.py
│ │ │ │ ├── cosmos_db.py
│ │ │ │ ├── dynamodb.py
│ │ │ │ ├── elasticsearch.py
│ │ │ │ ├── file.py
│ │ │ │ ├── firestore.py
│ │ │ │ ├── in_memory.py
│ │ │ │ ├── momento.py
│ │ │ │ ├── mongodb.py
│ │ │ │ ├── neo4j.py
│ │ │ │ ├── postgres.py
│ │ │ │ ├── redis.py
│ │ │ │ ├── rocksetdb.py
│ │ │ │ ├── singlestoredb.py
│ │ │ │ ├── sql.py
│ │ │ │ ├── streamlit.py
│ │ │ │ ├── upstash_redis.py
│ │ │ │ ├── xata.py
│ │ │ │ └── zep.py
│ │ │ ├── combined.py
│ │ │ ├── entity.py
│ │ │ ├── kg.py
│ │ │ ├── motorhead_memory.py
│ │ │ ├── prompt.py
│ │ │ ├── readonly.py
│ │ │ ├── simple.py
│ │ │ ├── summary.py
│ │ │ ├── summary_buffer.py
│ │ │ ├── token_buffer.py
│ │ │ ├── utils.py
│ │ │ ├── vectorstore.py
│ │ │ ├── vectorstore_token_buffer_memory.py
│ │ │ └── zep_memory.py
│ │ ├── model_laboratory.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── boolean.py
│ │ │ ├── combining.py
│ │ │ ├── datetime.py
│ │ │ ├── enum.py
│ │ │ ├── ernie_functions.py
│ │ │ ├── fix.py
│ │ │ ├── format_instructions.py
│ │ │ ├── json.py
│ │ │ ├── list.py
│ │ │ ├── loading.py
│ │ │ ├── openai_functions.py
│ │ │ ├── openai_tools.py
│ │ │ ├── pandas_dataframe.py
│ │ │ ├── prompts.py
│ │ │ ├── pydantic.py
│ │ │ ├── rail_parser.py
│ │ │ ├── regex.py
│ │ │ ├── regex_dict.py
│ │ │ ├── retry.py
│ │ │ ├── structured.py
│ │ │ ├── xml.py
│ │ │ └── yaml.py
│ │ ├── prompts/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── chat.py
│ │ │ ├── example_selector/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── length_based.py
│ │ │ │ ├── ngram_overlap.py
│ │ │ │ └── semantic_similarity.py
│ │ │ ├── few_shot.py
│ │ │ ├── few_shot_with_templates.py
│ │ │ ├── loading.py
│ │ │ └── prompt.py
│ │ ├── py.typed
│ │ ├── python.py
│ │ ├── requests.py
│ │ ├── retrievers/
│ │ │ ├── __init__.py
│ │ │ ├── arcee.py
│ │ │ ├── arxiv.py
│ │ │ ├── azure_ai_search.py
│ │ │ ├── bedrock.py
│ │ │ ├── bm25.py
│ │ │ ├── chaindesk.py
│ │ │ ├── chatgpt_plugin_retriever.py
│ │ │ ├── cohere_rag_retriever.py
│ │ │ ├── contextual_compression.py
│ │ │ ├── databerry.py
│ │ │ ├── docarray.py
│ │ │ ├── document_compressors/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── chain_extract.py
│ │ │ │ ├── chain_extract_prompt.py
│ │ │ │ ├── chain_filter.py
│ │ │ │ ├── chain_filter_prompt.py
│ │ │ │ ├── cohere_rerank.py
│ │ │ │ ├── cross_encoder.py
│ │ │ │ ├── cross_encoder_rerank.py
│ │ │ │ ├── embeddings_filter.py
│ │ │ │ ├── flashrank_rerank.py
│ │ │ │ └── listwise_rerank.py
│ │ │ ├── elastic_search_bm25.py
│ │ │ ├── embedchain.py
│ │ │ ├── ensemble.py
│ │ │ ├── google_cloud_documentai_warehouse.py
│ │ │ ├── google_vertex_ai_search.py
│ │ │ ├── kay.py
│ │ │ ├── kendra.py
│ │ │ ├── knn.py
│ │ │ ├── llama_index.py
│ │ │ ├── merger_retriever.py
│ │ │ ├── metal.py
│ │ │ ├── milvus.py
│ │ │ ├── multi_query.py
│ │ │ ├── multi_vector.py
│ │ │ ├── outline.py
│ │ │ ├── parent_document_retriever.py
│ │ │ ├── pinecone_hybrid_search.py
│ │ │ ├── pubmed.py
│ │ │ ├── pupmed.py
│ │ │ ├── re_phraser.py
│ │ │ ├── remote_retriever.py
│ │ │ ├── self_query/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── astradb.py
│ │ │ │ ├── base.py
│ │ │ │ ├── chroma.py
│ │ │ │ ├── dashvector.py
│ │ │ │ ├── databricks_vector_search.py
│ │ │ │ ├── deeplake.py
│ │ │ │ ├── dingo.py
│ │ │ │ ├── elasticsearch.py
│ │ │ │ ├── milvus.py
│ │ │ │ ├── mongodb_atlas.py
│ │ │ │ ├── myscale.py
│ │ │ │ ├── opensearch.py
│ │ │ │ ├── pgvector.py
│ │ │ │ ├── pinecone.py
│ │ │ │ ├── qdrant.py
│ │ │ │ ├── redis.py
│ │ │ │ ├── supabase.py
│ │ │ │ ├── tencentvectordb.py
│ │ │ │ ├── timescalevector.py
│ │ │ │ ├── vectara.py
│ │ │ │ └── weaviate.py
│ │ │ ├── svm.py
│ │ │ ├── tavily_search_api.py
│ │ │ ├── tfidf.py
│ │ │ ├── time_weighted_retriever.py
│ │ │ ├── vespa_retriever.py
│ │ │ ├── weaviate_hybrid_search.py
│ │ │ ├── web_research.py
│ │ │ ├── wikipedia.py
│ │ │ ├── you.py
│ │ │ ├── zep.py
│ │ │ └── zilliz.py
│ │ ├── runnables/
│ │ │ ├── __init__.py
│ │ │ ├── hub.py
│ │ │ └── openai_functions.py
│ │ ├── schema/
│ │ │ ├── __init__.py
│ │ │ ├── agent.py
│ │ │ ├── cache.py
│ │ │ ├── callbacks/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── manager.py
│ │ │ │ ├── stdout.py
│ │ │ │ ├── streaming_stdout.py
│ │ │ │ └── tracers/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── evaluation.py
│ │ │ │ ├── langchain.py
│ │ │ │ ├── log_stream.py
│ │ │ │ ├── root_listeners.py
│ │ │ │ ├── run_collector.py
│ │ │ │ ├── schemas.py
│ │ │ │ └── stdout.py
│ │ │ ├── chat.py
│ │ │ ├── chat_history.py
│ │ │ ├── document.py
│ │ │ ├── embeddings.py
│ │ │ ├── exceptions.py
│ │ │ ├── language_model.py
│ │ │ ├── memory.py
│ │ │ ├── messages.py
│ │ │ ├── output.py
│ │ │ ├── output_parser.py
│ │ │ ├── prompt.py
│ │ │ ├── prompt_template.py
│ │ │ ├── retriever.py
│ │ │ ├── runnable/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── branch.py
│ │ │ │ ├── config.py
│ │ │ │ ├── configurable.py
│ │ │ │ ├── fallbacks.py
│ │ │ │ ├── history.py
│ │ │ │ ├── passthrough.py
│ │ │ │ ├── retry.py
│ │ │ │ ├── router.py
│ │ │ │ └── utils.py
│ │ │ ├── storage.py
│ │ │ └── vectorstore.py
│ │ ├── serpapi.py
│ │ ├── smith/
│ │ │ ├── __init__.py
│ │ │ └── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── config.py
│ │ │ ├── name_generation.py
│ │ │ ├── progress.py
│ │ │ ├── runner_utils.py
│ │ │ └── string_run_evaluator.py
│ │ ├── sql_database.py
│ │ ├── storage/
│ │ │ ├── __init__.py
│ │ │ ├── _lc_store.py
│ │ │ ├── encoder_backed.py
│ │ │ ├── exceptions.py
│ │ │ ├── file_system.py
│ │ │ ├── in_memory.py
│ │ │ ├── redis.py
│ │ │ └── upstash_redis.py
│ │ ├── text_splitter.py
│ │ ├── tools/
│ │ │ ├── __init__.py
│ │ │ ├── ainetwork/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── app.py
│ │ │ │ ├── base.py
│ │ │ │ ├── owner.py
│ │ │ │ ├── rule.py
│ │ │ │ ├── transfer.py
│ │ │ │ └── value.py
│ │ │ ├── amadeus/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── closest_airport.py
│ │ │ │ └── flight_search.py
│ │ │ ├── arxiv/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── azure_cognitive_services/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── form_recognizer.py
│ │ │ │ ├── image_analysis.py
│ │ │ │ ├── speech2text.py
│ │ │ │ ├── text2speech.py
│ │ │ │ └── text_analytics_health.py
│ │ │ ├── base.py
│ │ │ ├── bearly/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── bing_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── brave_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── clickup/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── convert_to_openai.py
│ │ │ ├── dataforseo_api_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── ddg_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── e2b_data_analysis/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── edenai/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── audio_speech_to_text.py
│ │ │ │ ├── audio_text_to_speech.py
│ │ │ │ ├── edenai_base_tool.py
│ │ │ │ ├── image_explicitcontent.py
│ │ │ │ ├── image_objectdetection.py
│ │ │ │ ├── ocr_identityparser.py
│ │ │ │ ├── ocr_invoiceparser.py
│ │ │ │ └── text_moderation.py
│ │ │ ├── eleven_labs/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── models.py
│ │ │ │ └── text2speech.py
│ │ │ ├── file_management/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── copy.py
│ │ │ │ ├── delete.py
│ │ │ │ ├── file_search.py
│ │ │ │ ├── list_dir.py
│ │ │ │ ├── move.py
│ │ │ │ ├── read.py
│ │ │ │ └── write.py
│ │ │ ├── github/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── gitlab/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── gmail/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── create_draft.py
│ │ │ │ ├── get_message.py
│ │ │ │ ├── get_thread.py
│ │ │ │ ├── search.py
│ │ │ │ └── send_message.py
│ │ │ ├── golden_query/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_cloud/
│ │ │ │ ├── __init__.py
│ │ │ │ └── texttospeech.py
│ │ │ ├── google_finance/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_jobs/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_lens/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_places/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_scholar/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_serper/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── google_trends/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── graphql/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── human/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── ifttt.py
│ │ │ ├── interaction/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── jira/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── json/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── memorize/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── merriam_webster/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── metaphor_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── multion/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── close_session.py
│ │ │ │ ├── create_session.py
│ │ │ │ └── update_session.py
│ │ │ ├── nasa/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── nuclia/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── office365/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── create_draft_message.py
│ │ │ │ ├── events_search.py
│ │ │ │ ├── messages_search.py
│ │ │ │ ├── send_event.py
│ │ │ │ └── send_message.py
│ │ │ ├── openapi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── utils/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── api_models.py
│ │ │ │ └── openapi_utils.py
│ │ │ ├── openweathermap/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── playwright/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── click.py
│ │ │ │ ├── current_page.py
│ │ │ │ ├── extract_hyperlinks.py
│ │ │ │ ├── extract_text.py
│ │ │ │ ├── get_elements.py
│ │ │ │ ├── navigate.py
│ │ │ │ └── navigate_back.py
│ │ │ ├── plugin.py
│ │ │ ├── powerbi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── pubmed/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── python/
│ │ │ │ └── __init__.py
│ │ │ ├── reddit_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── render.py
│ │ │ ├── requests/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── retriever.py
│ │ │ ├── scenexplain/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── searchapi/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── searx_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── shell/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── slack/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base.py
│ │ │ │ ├── get_channel.py
│ │ │ │ ├── get_message.py
│ │ │ │ ├── schedule_message.py
│ │ │ │ └── send_message.py
│ │ │ ├── sleep/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── spark_sql/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── sql_database/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── prompt.py
│ │ │ │ └── tool.py
│ │ │ ├── stackexchange/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── steam/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── steamship_image_generation/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── tavily_search/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── vectorstore/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── wikipedia/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── wolfram_alpha/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tool.py
│ │ │ ├── yahoo_finance_news.py
│ │ │ ├── youtube/
│ │ │ │ ├── __init__.py
│ │ │ │ └── search.py
│ │ │ └── zapier/
│ │ │ ├── __init__.py
│ │ │ └── tool.py
│ │ ├── utilities/
│ │ │ ├── __init__.py
│ │ │ ├── alpha_vantage.py
│ │ │ ├── anthropic.py
│ │ │ ├── apify.py
│ │ │ ├── arcee.py
│ │ │ ├── arxiv.py
│ │ │ ├── asyncio.py
│ │ │ ├── awslambda.py
│ │ │ ├── bibtex.py
│ │ │ ├── bing_search.py
│ │ │ ├── brave_search.py
│ │ │ ├── clickup.py
│ │ │ ├── dalle_image_generator.py
│ │ │ ├── dataforseo_api_search.py
│ │ │ ├── duckduckgo_search.py
│ │ │ ├── github.py
│ │ │ ├── gitlab.py
│ │ │ ├── golden_query.py
│ │ │ ├── google_finance.py
│ │ │ ├── google_jobs.py
│ │ │ ├── google_lens.py
│ │ │ ├── google_places_api.py
│ │ │ ├── google_scholar.py
│ │ │ ├── google_search.py
│ │ │ ├── google_serper.py
│ │ │ ├── google_trends.py
│ │ │ ├── graphql.py
│ │ │ ├── jira.py
│ │ │ ├── max_compute.py
│ │ │ ├── merriam_webster.py
│ │ │ ├── metaphor_search.py
│ │ │ ├── nasa.py
│ │ │ ├── opaqueprompts.py
│ │ │ ├── openapi.py
│ │ │ ├── openweathermap.py
│ │ │ ├── outline.py
│ │ │ ├── portkey.py
│ │ │ ├── powerbi.py
│ │ │ ├── pubmed.py
│ │ │ ├── python.py
│ │ │ ├── reddit_search.py
│ │ │ ├── redis.py
│ │ │ ├── requests.py
│ │ │ ├── scenexplain.py
│ │ │ ├── searchapi.py
│ │ │ ├── searx_search.py
│ │ │ ├── serpapi.py
│ │ │ ├── spark_sql.py
│ │ │ ├── sql_database.py
│ │ │ ├── stackexchange.py
│ │ │ ├── steam.py
│ │ │ ├── tavily_search.py
│ │ │ ├── tensorflow_datasets.py
│ │ │ ├── twilio.py
│ │ │ ├── vertexai.py
│ │ │ ├── wikipedia.py
│ │ │ ├── wolfram_alpha.py
│ │ │ └── zapier.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── aiter.py
│ │ │ ├── env.py
│ │ │ ├── ernie_functions.py
│ │ │ ├── formatting.py
│ │ │ ├── html.py
│ │ │ ├── input.py
│ │ │ ├── iter.py
│ │ │ ├── json_schema.py
│ │ │ ├── math.py
│ │ │ ├── openai.py
│ │ │ ├── openai_functions.py
│ │ │ ├── pydantic.py
│ │ │ ├── strings.py
│ │ │ └── utils.py
│ │ └── vectorstores/
│ │ ├── __init__.py
│ │ ├── alibabacloud_opensearch.py
│ │ ├── analyticdb.py
│ │ ├── annoy.py
│ │ ├── astradb.py
│ │ ├── atlas.py
│ │ ├── awadb.py
│ │ ├── azure_cosmos_db.py
│ │ ├── azuresearch.py
│ │ ├── bageldb.py
│ │ ├── baiducloud_vector_search.py
│ │ ├── base.py
│ │ ├── cassandra.py
│ │ ├── chroma.py
│ │ ├── clarifai.py
│ │ ├── clickhouse.py
│ │ ├── dashvector.py
│ │ ├── databricks_vector_search.py
│ │ ├── deeplake.py
│ │ ├── dingo.py
│ │ ├── docarray/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── hnsw.py
│ │ │ └── in_memory.py
│ │ ├── elastic_vector_search.py
│ │ ├── elasticsearch.py
│ │ ├── epsilla.py
│ │ ├── faiss.py
│ │ ├── hippo.py
│ │ ├── hologres.py
│ │ ├── lancedb.py
│ │ ├── llm_rails.py
│ │ ├── marqo.py
│ │ ├── matching_engine.py
│ │ ├── meilisearch.py
│ │ ├── milvus.py
│ │ ├── momento_vector_index.py
│ │ ├── mongodb_atlas.py
│ │ ├── myscale.py
│ │ ├── neo4j_vector.py
│ │ ├── nucliadb.py
│ │ ├── opensearch_vector_search.py
│ │ ├── pgembedding.py
│ │ ├── pgvecto_rs.py
│ │ ├── pgvector.py
│ │ ├── pinecone.py
│ │ ├── qdrant.py
│ │ ├── redis/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── filters.py
│ │ │ └── schema.py
│ │ ├── rocksetdb.py
│ │ ├── scann.py
│ │ ├── semadb.py
│ │ ├── singlestoredb.py
│ │ ├── sklearn.py
│ │ ├── sqlitevss.py
│ │ ├── starrocks.py
│ │ ├── supabase.py
│ │ ├── tair.py
│ │ ├── tencentvectordb.py
│ │ ├── tiledb.py
│ │ ├── timescalevector.py
│ │ ├── typesense.py
│ │ ├── usearch.py
│ │ ├── utils.py
│ │ ├── vald.py
│ │ ├── vearch.py
│ │ ├── vectara.py
│ │ ├── vespa.py
│ │ ├── weaviate.py
│ │ ├── xata.py
│ │ ├── yellowbrick.py
│ │ ├── zep.py
│ │ └── zilliz.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── data.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── cache/
│ │ │ ├── __init__.py
│ │ │ └── fake_embeddings.py
│ │ ├── chains/
│ │ │ ├── __init__.py
│ │ │ └── openai_functions/
│ │ │ ├── __init__.py
│ │ │ └── test_openapi.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── conftest.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ └── embedding_distance/
│ │ │ ├── __init__.py
│ │ │ └── test_embedding.py
│ │ ├── examples/
│ │ │ ├── README.org
│ │ │ ├── README.rst
│ │ │ ├── brandfetch-brandfetch-2.0.0-resolved.json
│ │ │ ├── default-encoding.py
│ │ │ ├── example-utf8.html
│ │ │ ├── example.html
│ │ │ ├── example.json
│ │ │ ├── example.mht
│ │ │ ├── facebook_chat.json
│ │ │ ├── factbook.xml
│ │ │ ├── fake-email-attachment.eml
│ │ │ ├── fake.odt
│ │ │ ├── hello.msg
│ │ │ ├── hello_world.js
│ │ │ ├── hello_world.py
│ │ │ ├── non-utf8-encoding.py
│ │ │ ├── sample_rss_feeds.opml
│ │ │ ├── sitemap.xml
│ │ │ ├── stanley-cups.csv
│ │ │ ├── stanley-cups.tsv
│ │ │ ├── stanley-cups.xlsx
│ │ │ └── whatsapp_chat.txt
│ │ ├── memory/
│ │ │ ├── __init__.py
│ │ │ └── docker-compose/
│ │ │ └── elasticsearch.yml
│ │ ├── prompts/
│ │ │ └── __init__.py
│ │ ├── retrievers/
│ │ │ └── document_compressors/
│ │ │ ├── __init__.py
│ │ │ ├── test_cohere_reranker.py
│ │ │ └── test_listwise_rerank.py
│ │ ├── test_compile.py
│ │ ├── test_hub.py
│ │ └── test_schema.py
│ ├── mock_servers/
│ │ ├── __init__.py
│ │ └── robot/
│ │ ├── __init__.py
│ │ └── server.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── _api/
│ │ ├── __init__.py
│ │ └── test_importing.py
│ ├── agents/
│ │ ├── __init__.py
│ │ ├── agent_toolkits/
│ │ │ ├── __init__.py
│ │ │ └── test_imports.py
│ │ ├── format_scratchpad/
│ │ │ ├── __init__.py
│ │ │ ├── test_log.py
│ │ │ ├── test_log_to_messages.py
│ │ │ ├── test_openai_functions.py
│ │ │ ├── test_openai_tools.py
│ │ │ └── test_xml.py
│ │ ├── output_parsers/
│ │ │ ├── __init__.py
│ │ │ ├── test_convo_output_parser.py
│ │ │ ├── test_json.py
│ │ │ ├── test_openai_functions.py
│ │ │ ├── test_react_json_single_input.py
│ │ │ ├── test_react_single_input.py
│ │ │ ├── test_self_ask.py
│ │ │ └── test_xml.py
│ │ ├── test_agent.py
│ │ ├── test_agent_async.py
│ │ ├── test_agent_iterator.py
│ │ ├── test_chat.py
│ │ ├── test_imports.py
│ │ ├── test_initialize.py
│ │ ├── test_mrkl.py
│ │ ├── test_mrkl_output_parser.py
│ │ ├── test_openai_assistant.py
│ │ ├── test_openai_functions_multi.py
│ │ ├── test_public_api.py
│ │ ├── test_structured_chat.py
│ │ └── test_types.py
│ ├── callbacks/
│ │ ├── __init__.py
│ │ ├── fake_callback_handler.py
│ │ ├── test_base.py
│ │ ├── test_file.py
│ │ ├── test_imports.py
│ │ ├── test_manager.py
│ │ ├── test_stdout.py
│ │ └── tracers/
│ │ ├── __init__.py
│ │ └── test_logging.py
│ ├── chains/
│ │ ├── __init__.py
│ │ ├── query_constructor/
│ │ │ ├── __init__.py
│ │ │ └── test_parser.py
│ │ ├── question_answering/
│ │ │ ├── __init__.py
│ │ │ └── test_map_rerank_prompt.py
│ │ ├── test_base.py
│ │ ├── test_combine_documents.py
│ │ ├── test_constitutional_ai.py
│ │ ├── test_conversation.py
│ │ ├── test_conversation_retrieval.py
│ │ ├── test_flare.py
│ │ ├── test_history_aware_retriever.py
│ │ ├── test_hyde.py
│ │ ├── test_imports.py
│ │ ├── test_llm_checker.py
│ │ ├── test_llm_math.py
│ │ ├── test_llm_summarization_checker.py
│ │ ├── test_memory.py
│ │ ├── test_qa_with_sources.py
│ │ ├── test_retrieval.py
│ │ ├── test_sequential.py
│ │ ├── test_summary_buffer_memory.py
│ │ └── test_transform.py
│ ├── chat_models/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── conftest.py
│ ├── data/
│ │ ├── prompt_file.txt
│ │ └── prompts/
│ │ ├── prompt_extra_args.json
│ │ ├── prompt_missing_args.json
│ │ └── simple_prompt.json
│ ├── docstore/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── document_loaders/
│ │ ├── __init__.py
│ │ ├── blob_loaders/
│ │ │ ├── __init__.py
│ │ │ └── test_public_api.py
│ │ ├── parsers/
│ │ │ ├── __init__.py
│ │ │ └── test_public_api.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── document_transformers/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_caching.py
│ │ └── test_imports.py
│ ├── evaluation/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── comparison/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── criteria/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── exact_match/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── parsing/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_json_distance.py
│ │ │ └── test_json_schema.py
│ │ ├── qa/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── regex_match/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── run_evaluators/
│ │ │ └── __init__.py
│ │ ├── scoring/
│ │ │ ├── __init__.py
│ │ │ └── test_eval_chain.py
│ │ ├── string_distance/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ └── test_imports.py
│ ├── examples/
│ │ ├── example-non-utf8.csv
│ │ ├── example-non-utf8.txt
│ │ ├── example-utf8.csv
│ │ ├── example-utf8.txt
│ │ └── test_specs/
│ │ ├── apis-guru/
│ │ │ └── apispec.json
│ │ ├── biztoc/
│ │ │ └── apispec.json
│ │ ├── calculator/
│ │ │ └── apispec.json
│ │ ├── datasette/
│ │ │ └── apispec.json
│ │ ├── freetv-app/
│ │ │ └── apispec.json
│ │ ├── joinmilo/
│ │ │ └── apispec.json
│ │ ├── klarna/
│ │ │ └── apispec.json
│ │ ├── milo/
│ │ │ └── apispec.json
│ │ ├── quickchart/
│ │ │ └── apispec.json
│ │ ├── robot/
│ │ │ └── apispec.yaml
│ │ ├── robot_openapi.yaml
│ │ ├── schooldigger/
│ │ │ └── apispec.json
│ │ ├── shop/
│ │ │ └── apispec.json
│ │ ├── slack/
│ │ │ └── apispec.json
│ │ ├── speak/
│ │ │ └── apispec.json
│ │ ├── urlbox/
│ │ │ └── apispec.json
│ │ ├── wellknown/
│ │ │ └── apispec.json
│ │ ├── wolframalpha/
│ │ │ └── apispec.json
│ │ ├── wolframcloud/
│ │ │ └── apispec.json
│ │ └── zapier/
│ │ └── apispec.json
│ ├── graphs/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── indexes/
│ │ ├── __init__.py
│ │ ├── test_api.py
│ │ ├── test_imports.py
│ │ └── test_indexing.py
│ ├── llms/
│ │ ├── __init__.py
│ │ ├── fake_chat_model.py
│ │ ├── fake_llm.py
│ │ ├── test_base.py
│ │ ├── test_fake_chat_model.py
│ │ └── test_imports.py
│ ├── load/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_dump.ambr
│ │ ├── test_dump.py
│ │ ├── test_imports.py
│ │ └── test_load.py
│ ├── memory/
│ │ ├── __init__.py
│ │ ├── chat_message_histories/
│ │ │ ├── __init__.py
│ │ │ └── test_imports.py
│ │ ├── test_combined_memory.py
│ │ └── test_imports.py
│ ├── output_parsers/
│ │ ├── __init__.py
│ │ ├── test_boolean_parser.py
│ │ ├── test_combining_parser.py
│ │ ├── test_datetime_parser.py
│ │ ├── test_enum_parser.py
│ │ ├── test_fix.py
│ │ ├── test_imports.py
│ │ ├── test_json.py
│ │ ├── test_pandas_dataframe_parser.py
│ │ ├── test_regex.py
│ │ ├── test_regex_dict.py
│ │ ├── test_retry.py
│ │ ├── test_structured_parser.py
│ │ └── test_yaml_parser.py
│ ├── prompts/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_chat.py
│ │ ├── test_few_shot.py
│ │ ├── test_few_shot_with_templates.py
│ │ ├── test_imports.py
│ │ ├── test_loading.py
│ │ └── test_prompt.py
│ ├── retrievers/
│ │ ├── __init__.py
│ │ ├── document_compressors/
│ │ │ ├── __init__.py
│ │ │ ├── test_chain_extract.py
│ │ │ ├── test_chain_filter.py
│ │ │ └── test_listwise_rerank.py
│ │ ├── parrot_retriever.py
│ │ ├── self_query/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── sequential_retriever.py
│ │ ├── test_ensemble.py
│ │ ├── test_imports.py
│ │ ├── test_multi_query.py
│ │ ├── test_multi_vector.py
│ │ ├── test_parent_document.py
│ │ └── test_time_weighted_retriever.py
│ ├── runnables/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_openai_functions.ambr
│ │ ├── test_hub.py
│ │ └── test_openai_functions.py
│ ├── schema/
│ │ ├── __init__.py
│ │ ├── runnable/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_branch.py
│ │ │ ├── test_config.py
│ │ │ ├── test_configurable.py
│ │ │ ├── test_fallbacks.py
│ │ │ ├── test_history.py
│ │ │ ├── test_imports.py
│ │ │ ├── test_passthrough.py
│ │ │ ├── test_retry.py
│ │ │ ├── test_router.py
│ │ │ └── test_utils.py
│ │ ├── test_agent.py
│ │ ├── test_cache.py
│ │ ├── test_chat.py
│ │ ├── test_chat_history.py
│ │ ├── test_document.py
│ │ ├── test_embeddings.py
│ │ ├── test_exceptions.py
│ │ ├── test_imports.py
│ │ ├── test_language_model.py
│ │ ├── test_memory.py
│ │ ├── test_messages.py
│ │ ├── test_output.py
│ │ ├── test_output_parser.py
│ │ ├── test_prompt.py
│ │ ├── test_prompt_template.py
│ │ ├── test_retriever.py
│ │ ├── test_storage.py
│ │ └── test_vectorstore.py
│ ├── smith/
│ │ ├── __init__.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── test_runner_utils.py
│ │ │ └── test_string_run_evaluator.py
│ │ └── test_imports.py
│ ├── storage/
│ │ ├── __init__.py
│ │ ├── test_filesystem.py
│ │ ├── test_imports.py
│ │ └── test_lc_store.py
│ ├── stubs.py
│ ├── test_dependencies.py
│ ├── test_formatting.py
│ ├── test_globals.py
│ ├── test_imports.py
│ ├── test_pytest_config.py
│ ├── test_schema.py
│ ├── test_utils.py
│ ├── tools/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ ├── test_imports.py
│ │ └── test_render.py
│ ├── utilities/
│ │ ├── __init__.py
│ │ └── test_imports.py
│ ├── utils/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_iter.py
│ │ └── test_openai_functions.py
│ └── vectorstores/
│ ├── __init__.py
│ └── test_public_api.py
├── langchain_v1/
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ ├── factory.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _execution.py
│ │ │ │ ├── _redaction.py
│ │ │ │ ├── _retry.py
│ │ │ │ ├── context_editing.py
│ │ │ │ ├── file_search.py
│ │ │ │ ├── human_in_the_loop.py
│ │ │ │ ├── model_call_limit.py
│ │ │ │ ├── model_fallback.py
│ │ │ │ ├── model_retry.py
│ │ │ │ ├── pii.py
│ │ │ │ ├── shell_tool.py
│ │ │ │ ├── summarization.py
│ │ │ │ ├── todo.py
│ │ │ │ ├── tool_call_limit.py
│ │ │ │ ├── tool_emulator.py
│ │ │ │ ├── tool_retry.py
│ │ │ │ ├── tool_selection.py
│ │ │ │ └── types.py
│ │ │ └── structured_output.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── base.py
│ │ ├── messages/
│ │ │ └── __init__.py
│ │ ├── py.typed
│ │ ├── rate_limiters/
│ │ │ └── __init__.py
│ │ └── tools/
│ │ ├── __init__.py
│ │ └── tool_node.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── check_version.py
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── agents/
│ │ │ ├── __init__.py
│ │ │ └── middleware/
│ │ │ ├── __init__.py
│ │ │ └── test_shell_tool_integration.py
│ │ ├── cache/
│ │ │ ├── __init__.py
│ │ │ └── fake_embeddings.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ ├── conftest.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ └── test_base.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── agents/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ ├── test_middleware_agent.ambr
│ │ │ ├── test_middleware_decorators.ambr
│ │ │ ├── test_middleware_framework.ambr
│ │ │ └── test_return_direct_graph.ambr
│ │ ├── any_str.py
│ │ ├── compose-postgres.yml
│ │ ├── compose-redis.yml
│ │ ├── conftest.py
│ │ ├── conftest_checkpointer.py
│ │ ├── conftest_store.py
│ │ ├── memory_assert.py
│ │ ├── messages.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ ├── __snapshots__/
│ │ │ │ ├── test_middleware_decorators.ambr
│ │ │ │ ├── test_middleware_diagram.ambr
│ │ │ │ └── test_middleware_framework.ambr
│ │ │ ├── core/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── __snapshots__/
│ │ │ │ │ ├── test_decorators.ambr
│ │ │ │ │ ├── test_diagram.ambr
│ │ │ │ │ └── test_framework.ambr
│ │ │ │ ├── test_composition.py
│ │ │ │ ├── test_decorators.py
│ │ │ │ ├── test_diagram.py
│ │ │ │ ├── test_dynamic_tools.py
│ │ │ │ ├── test_framework.py
│ │ │ │ ├── test_overrides.py
│ │ │ │ ├── test_sync_async_wrappers.py
│ │ │ │ ├── test_tools.py
│ │ │ │ ├── test_wrap_model_call.py
│ │ │ │ ├── test_wrap_model_call_state_update.py
│ │ │ │ └── test_wrap_tool_call.py
│ │ │ └── implementations/
│ │ │ ├── __init__.py
│ │ │ ├── test_context_editing.py
│ │ │ ├── test_file_search.py
│ │ │ ├── test_human_in_the_loop.py
│ │ │ ├── test_model_call_limit.py
│ │ │ ├── test_model_fallback.py
│ │ │ ├── test_model_retry.py
│ │ │ ├── test_pii.py
│ │ │ ├── test_shell_execution_policies.py
│ │ │ ├── test_shell_tool.py
│ │ │ ├── test_structured_output_retry.py
│ │ │ ├── test_summarization.py
│ │ │ ├── test_todo.py
│ │ │ ├── test_tool_call_limit.py
│ │ │ ├── test_tool_emulator.py
│ │ │ ├── test_tool_retry.py
│ │ │ └── test_tool_selection.py
│ │ ├── middleware_typing/
│ │ │ ├── __init__.py
│ │ │ ├── test_middleware_backwards_compat.py
│ │ │ ├── test_middleware_type_errors.py
│ │ │ └── test_middleware_typing.py
│ │ ├── model.py
│ │ ├── specifications/
│ │ │ ├── responses.json
│ │ │ └── return_direct.json
│ │ ├── test_agent_name.py
│ │ ├── test_create_agent_tool_validation.py
│ │ ├── test_fetch_last_ai_and_tool_messages.py
│ │ ├── test_injected_runtime_create_agent.py
│ │ ├── test_kwargs_tool_runtime_injection.py
│ │ ├── test_react_agent.py
│ │ ├── test_response_format.py
│ │ ├── test_response_format_integration.py
│ │ ├── test_responses.py
│ │ ├── test_responses_spec.py
│ │ ├── test_return_direct_graph.py
│ │ ├── test_return_direct_spec.py
│ │ ├── test_state_schema.py
│ │ ├── test_system_message.py
│ │ └── utils.py
│ ├── chat_models/
│ │ ├── __init__.py
│ │ └── test_chat_models.py
│ ├── conftest.py
│ ├── embeddings/
│ │ ├── __init__.py
│ │ ├── test_base.py
│ │ └── test_imports.py
│ ├── test_dependencies.py
│ ├── test_imports.py
│ ├── test_pytest_config.py
│ ├── test_version.py
│ └── tools/
│ ├── __init__.py
│ └── test_imports.py
├── model-profiles/
│ ├── Makefile
│ ├── README.md
│ ├── extended_testing_deps.txt
│ ├── langchain_model_profiles/
│ │ ├── __init__.py
│ │ └── cli.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ └── test_cli.py
├── partners/
│ ├── README.md
│ ├── anthropic/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_anthropic/
│ │ │ ├── __init__.py
│ │ │ ├── _client_utils.py
│ │ │ ├── _compat.py
│ │ │ ├── _version.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── experimental.py
│ │ │ ├── llms.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── anthropic_tools.py
│ │ │ │ ├── bash.py
│ │ │ │ ├── file_search.py
│ │ │ │ └── prompt_caching.py
│ │ │ ├── output_parsers.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ ├── check_version.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── _utils.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ ├── test_anthropic_tools.py
│ │ │ ├── test_bash.py
│ │ │ ├── test_file_search.py
│ │ │ └── test_prompt_caching.py
│ │ ├── test_chat_models.py
│ │ ├── test_client_utils.py
│ │ ├── test_imports.py
│ │ ├── test_llms.py
│ │ ├── test_output_parsers.py
│ │ └── test_standard.py
│ ├── deepseek/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_deepseek/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ └── test_compile.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ └── test_chat_models.py
│ ├── exa/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_exa/
│ │ │ ├── __init__.py
│ │ │ ├── _utilities.py
│ │ │ ├── py.typed
│ │ │ ├── retrievers.py
│ │ │ └── tools.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_find_similar_tool.py
│ │ │ ├── test_retriever.py
│ │ │ └── test_search_tool.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── fireworks/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_fireworks/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings.py
│ │ │ ├── llms.py
│ │ │ ├── py.typed
│ │ │ └── version.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_embeddings_standard.py
│ │ ├── test_imports.py
│ │ ├── test_llms.py
│ │ └── test_standard.py
│ ├── groq/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_groq/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── py.typed
│ │ │ └── version.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── __init__.py
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── fake/
│ │ │ ├── __init__.py
│ │ │ └── callbacks.py
│ │ ├── test_chat_models.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── huggingface/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_huggingface/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ └── huggingface.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── huggingface.py
│ │ │ │ └── huggingface_endpoint.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── huggingface_endpoint.py
│ │ │ │ └── huggingface_pipeline.py
│ │ │ ├── py.typed
│ │ │ ├── tests/
│ │ │ │ ├── __init__.py
│ │ │ │ └── integration_tests/
│ │ │ │ └── __init__.py
│ │ │ └── utils/
│ │ │ └── import_utils.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings_standard.py
│ │ │ ├── test_llms.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_huggingface_endpoint.py
│ │ └── test_huggingface_pipeline.py
│ ├── mistralai/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_mistralai/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ ├── embeddings.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── nomic/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_nomic/
│ │ │ ├── __init__.py
│ │ │ ├── embeddings.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_compile.py
│ │ │ └── test_embeddings.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── ollama/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_ollama/
│ │ │ ├── __init__.py
│ │ │ ├── _compat.py
│ │ │ ├── _utils.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ ├── llms.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cassettes/
│ │ │ │ │ └── test_chat_models_standard/
│ │ │ │ │ └── TestChatOllama.test_stream_time.yaml
│ │ │ │ ├── test_chat_models.py
│ │ │ │ ├── test_chat_models_reasoning.py
│ │ │ │ └── test_chat_models_standard.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embeddings.py
│ │ │ └── test_llms.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_auth.py
│ │ ├── test_chat_models.py
│ │ ├── test_embeddings.py
│ │ ├── test_imports.py
│ │ └── test_llms.py
│ ├── openai/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_openai/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _client_utils.py
│ │ │ │ ├── _compat.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── azure.py
│ │ │ │ └── base.py
│ │ │ ├── middleware/
│ │ │ │ ├── __init__.py
│ │ │ │ └── openai_moderation.py
│ │ │ ├── output_parsers/
│ │ │ │ ├── __init__.py
│ │ │ │ └── tools.py
│ │ │ ├── py.typed
│ │ │ └── tools/
│ │ │ ├── __init__.py
│ │ │ └── custom_tool.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ ├── test_azure_standard.py
│ │ │ │ ├── test_base.py
│ │ │ │ ├── test_base_standard.py
│ │ │ │ ├── test_responses_api.py
│ │ │ │ └── test_responses_standard.py
│ │ │ ├── embeddings/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ ├── test_base.py
│ │ │ │ └── test_base_standard.py
│ │ │ ├── llms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_azure.py
│ │ │ │ └── test_base.py
│ │ │ └── test_compile.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── chat_models/
│ │ │ ├── __init__.py
│ │ │ ├── __snapshots__/
│ │ │ │ ├── test_azure_standard.ambr
│ │ │ │ ├── test_base_standard.ambr
│ │ │ │ └── test_responses_standard.ambr
│ │ │ ├── test_azure.py
│ │ │ ├── test_azure_standard.py
│ │ │ ├── test_base.py
│ │ │ ├── test_base_standard.py
│ │ │ ├── test_imports.py
│ │ │ ├── test_prompt_cache_key.py
│ │ │ ├── test_responses_standard.py
│ │ │ └── test_responses_stream.py
│ │ ├── embeddings/
│ │ │ ├── __init__.py
│ │ │ ├── test_azure_embeddings.py
│ │ │ ├── test_azure_standard.py
│ │ │ ├── test_base.py
│ │ │ ├── test_base_standard.py
│ │ │ └── test_imports.py
│ │ ├── fake/
│ │ │ ├── __init__.py
│ │ │ └── callbacks.py
│ │ ├── llms/
│ │ │ ├── __init__.py
│ │ │ ├── test_azure.py
│ │ │ ├── test_base.py
│ │ │ └── test_imports.py
│ │ ├── middleware/
│ │ │ ├── __init__.py
│ │ │ └── test_openai_moderation_middleware.py
│ │ ├── test_imports.py
│ │ ├── test_load.py
│ │ ├── test_secrets.py
│ │ ├── test_token_counts.py
│ │ └── test_tools.py
│ ├── openrouter/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_openrouter/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ └── _profiles.py
│ │ │ └── py.typed
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── __init__.py
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_compile.py
│ │ │ └── test_standard.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── __snapshots__/
│ │ │ └── test_standard.ambr
│ │ ├── test_chat_models.py
│ │ ├── test_imports.py
│ │ └── test_standard.py
│ ├── perplexity/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_perplexity/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── chat_models.py
│ │ │ ├── data/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── _profiles.py
│ │ │ │ └── profile_augmentations.toml
│ │ │ ├── output_parsers.py
│ │ │ ├── py.typed
│ │ │ ├── retrievers.py
│ │ │ ├── tools.py
│ │ │ └── types.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── test_chat_models.py
│ │ │ ├── test_chat_models_standard.py
│ │ │ ├── test_compile.py
│ │ │ └── test_search_api.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_chat_models_standard.py
│ │ ├── test_imports.py
│ │ ├── test_output_parsers.py
│ │ ├── test_retrievers.py
│ │ ├── test_secrets.py
│ │ └── test_tools.py
│ ├── qdrant/
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── langchain_qdrant/
│ │ │ ├── __init__.py
│ │ │ ├── _utils.py
│ │ │ ├── fastembed_sparse.py
│ │ │ ├── py.typed
│ │ │ ├── qdrant.py
│ │ │ ├── sparse_embeddings.py
│ │ │ └── vectorstores.py
│ │ ├── pyproject.toml
│ │ ├── scripts/
│ │ │ ├── check_imports.py
│ │ │ └── lint_imports.sh
│ │ └── tests/
│ │ ├── __init__.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── async_api/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_add_texts.py
│ │ │ │ ├── test_from_texts.py
│ │ │ │ ├── test_max_marginal_relevance.py
│ │ │ │ └── test_similarity_search.py
│ │ │ ├── common.py
│ │ │ ├── conftest.py
│ │ │ ├── fastembed/
│ │ │ │ ├── __init__.py
│ │ │ │ └── test_fastembed_sparse.py
│ │ │ ├── fixtures.py
│ │ │ ├── qdrant_vector_store/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_add_texts.py
│ │ │ │ ├── test_from_existing.py
│ │ │ │ ├── test_from_texts.py
│ │ │ │ ├── test_mmr.py
│ │ │ │ └── test_search.py
│ │ │ ├── test_add_texts.py
│ │ │ ├── test_compile.py
│ │ │ ├── test_embedding_interface.py
│ │ │ ├── test_from_existing_collection.py
│ │ │ ├── test_from_texts.py
│ │ │ ├── test_max_marginal_relevance.py
│ │ │ └── test_similarity_search.py
│ │ └── unit_tests/
│ │ ├── __init__.py
│ │ ├── test_imports.py
│ │ ├── test_standard.py
│ │ └── test_vectorstores.py
│ └── xai/
│ ├── LICENSE
│ ├── Makefile
│ ├── README.md
│ ├── langchain_xai/
│ │ ├── __init__.py
│ │ ├── chat_models.py
│ │ ├── data/
│ │ │ ├── __init__.py
│ │ │ └── _profiles.py
│ │ └── py.typed
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ ├── test_chat_models.py
│ │ ├── test_chat_models_standard.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── __snapshots__/
│ │ └── test_chat_models_standard.ambr
│ ├── test_chat_models.py
│ ├── test_chat_models_standard.py
│ ├── test_imports.py
│ └── test_secrets.py
├── standard-tests/
│ ├── Makefile
│ ├── README.md
│ ├── langchain_tests/
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── conftest.py
│ │ ├── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── base_store.py
│ │ │ ├── cache.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ ├── indexer.py
│ │ │ ├── retrievers.py
│ │ │ ├── sandboxes.py
│ │ │ ├── tools.py
│ │ │ └── vectorstores.py
│ │ ├── py.typed
│ │ ├── unit_tests/
│ │ │ ├── __init__.py
│ │ │ ├── chat_models.py
│ │ │ ├── embeddings.py
│ │ │ └── tools.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ └── pydantic.py
│ ├── pyproject.toml
│ ├── scripts/
│ │ ├── check_imports.py
│ │ └── lint_imports.sh
│ └── tests/
│ ├── __init__.py
│ ├── integration_tests/
│ │ ├── __init__.py
│ │ └── test_compile.py
│ └── unit_tests/
│ ├── __init__.py
│ ├── custom_chat_model.py
│ ├── test_basic_retriever.py
│ ├── test_basic_tool.py
│ ├── test_custom_chat_model.py
│ ├── test_decorated_tool.py
│ ├── test_embeddings.py
│ ├── test_in_memory_base_store.py
│ ├── test_in_memory_cache.py
│ └── test_in_memory_vectorstore.py
└── text-splitters/
├── Makefile
├── README.md
├── extended_testing_deps.txt
├── langchain_text_splitters/
│ ├── __init__.py
│ ├── base.py
│ ├── character.py
│ ├── html.py
│ ├── json.py
│ ├── jsx.py
│ ├── konlpy.py
│ ├── latex.py
│ ├── markdown.py
│ ├── nltk.py
│ ├── py.typed
│ ├── python.py
│ ├── sentence_transformers.py
│ ├── spacy.py
│ └── xsl/
│ └── converting_to_header.xslt
├── pyproject.toml
├── scripts/
│ ├── check_imports.py
│ └── lint_imports.sh
└── tests/
├── __init__.py
├── integration_tests/
│ ├── __init__.py
│ ├── test_compile.py
│ ├── test_nlp_text_splitters.py
│ └── test_text_splitter.py
├── test_data/
│ └── test_splitter.xslt
└── unit_tests/
├── __init__.py
├── conftest.py
├── test_html_security.py
└── test_text_splitters.py
Showing preview only (1,217K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (12145 symbols across 1888 files)
FILE: .github/scripts/check_diff.py
function all_package_dirs (line 51) | def all_package_dirs() -> Set[str]:
function dependents_graph (line 59) | def dependents_graph() -> dict:
function add_dependents (line 112) | def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:
function _get_configs_for_single_dir (line 125) | def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, s...
function _get_pydantic_test_configs (line 153) | def _get_pydantic_test_configs(
function _get_configs_for_multi_dirs (line 208) | def _get_configs_for_multi_dirs(
FILE: .github/scripts/get_min_versions.py
function get_pypi_versions (line 37) | def get_pypi_versions(package_name: str) -> List[str]:
function get_minimum_version (line 56) | def get_minimum_version(package_name: str, spec_string: str) -> str | None:
function _check_python_version_from_requirement (line 94) | def _check_python_version_from_requirement(
function get_min_version_from_toml (line 111) | def get_min_version_from_toml(
function check_python_version (line 155) | def check_python_version(version_string, constraint_string):
FILE: .github/scripts/pr-labeler.js
function loadConfig (line 9) | function loadConfig() {
function init (line 35) | function init(github, owner, repo, config, core) {
function loadAndInit (line 273) | function loadAndInit(github, owner, repo, core) {
FILE: libs/core/langchain_core/_api/__init__.py
function __getattr__ (line 60) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 81) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/_api/beta_decorator.py
class LangChainBetaWarning (line 22) | class LangChainBetaWarning(DeprecationWarning):
function beta (line 32) | def beta(
function suppress_langchain_beta_warning (line 206) | def suppress_langchain_beta_warning() -> Generator[None, None, None]:
function warn_beta (line 213) | def warn_beta(
function surface_langchain_beta_warnings (line 248) | def surface_langchain_beta_warnings() -> None:
FILE: libs/core/langchain_core/_api/deprecation.py
function _build_deprecation_message (line 30) | def _build_deprecation_message(
class LangChainDeprecationWarning (line 51) | class LangChainDeprecationWarning(DeprecationWarning):
class LangChainPendingDeprecationWarning (line 55) | class LangChainPendingDeprecationWarning(PendingDeprecationWarning):
function _validate_deprecation_params (line 66) | def _validate_deprecation_params(
function deprecated (line 89) | def deprecated(
function suppress_langchain_deprecation_warning (line 430) | def suppress_langchain_deprecation_warning() -> Generator[None, None, No...
function warn_deprecated (line 438) | def warn_deprecated(
function surface_langchain_deprecation_warnings (line 538) | def surface_langchain_deprecation_warnings() -> None:
function rename_parameter (line 555) | def rename_parameter(
FILE: libs/core/langchain_core/_api/internal.py
function is_caller_internal (line 5) | def is_caller_internal(depth: int = 2) -> bool:
FILE: libs/core/langchain_core/_api/path.py
function get_relative_path (line 11) | def get_relative_path(file: Path | str, *, relative_to: Path = PACKAGE_D...
function as_import_path (line 26) | def as_import_path(
FILE: libs/core/langchain_core/_import_utils.py
function import_attr (line 4) | def import_attr(
FILE: libs/core/langchain_core/_security/_ssrf_protection.py
function _normalize_ip (line 82) | def _normalize_ip(ip_str: str) -> str:
function is_private_ip (line 97) | def is_private_ip(ip_str: str) -> bool:
function is_cloud_metadata (line 113) | def is_cloud_metadata(hostname: str, ip_str: str | None = None) -> bool:
function is_localhost (line 143) | def is_localhost(hostname: str, ip_str: str | None = None) -> bool:
function validate_safe_url (line 174) | def validate_safe_url(
function is_safe_url (line 293) | def is_safe_url(
function _validate_url_ssrf_strict (line 327) | def _validate_url_ssrf_strict(v: Any) -> Any:
function _validate_url_ssrf_https_only (line 334) | def _validate_url_ssrf_https_only(v: Any) -> Any:
function _validate_url_ssrf_relaxed (line 341) | def _validate_url_ssrf_relaxed(v: Any) -> Any:
FILE: libs/core/langchain_core/agents.py
class AgentAction (line 44) | class AgentAction(Serializable):
method __init__ (line 71) | def __init__(self, tool: str, tool_input: str | dict, log: str, **kwar...
method is_lc_serializable (line 82) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 91) | def get_lc_namespace(cls) -> list[str]:
method messages (line 100) | def messages(self) -> Sequence[BaseMessage]:
class AgentActionMessageLog (line 105) | class AgentActionMessageLog(AgentAction):
class AgentStep (line 131) | class AgentStep(Serializable):
method messages (line 141) | def messages(self) -> Sequence[BaseMessage]:
class AgentFinish (line 146) | class AgentFinish(Serializable):
method __init__ (line 167) | def __init__(self, return_values: dict, log: str, **kwargs: Any):
method is_lc_serializable (line 172) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 177) | def get_lc_namespace(cls) -> list[str]:
method messages (line 186) | def messages(self) -> Sequence[BaseMessage]:
function _convert_agent_action_to_messages (line 191) | def _convert_agent_action_to_messages(
function _convert_agent_observation_to_messages (line 209) | def _convert_agent_observation_to_messages(
function _create_function_message (line 234) | def _create_function_message(
FILE: libs/core/langchain_core/caches.py
class BaseCache (line 32) | class BaseCache(ABC):
method lookup (line 49) | def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None:
method update (line 73) | def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_...
method clear (line 97) | def clear(self, **kwargs: Any) -> None:
method alookup (line 100) | async def alookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TY...
method aupdate (line 125) | async def aupdate(
method aclear (line 150) | async def aclear(self, **kwargs: Any) -> None:
class InMemoryCache (line 155) | class InMemoryCache(BaseCache):
method __init__ (line 182) | def __init__(self, *, maxsize: int | None = None) -> None:
method lookup (line 201) | def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None:
method update (line 216) | def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_...
method clear (line 234) | def clear(self, **kwargs: Any) -> None:
method alookup (line 238) | async def alookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TY...
method aupdate (line 253) | async def aupdate(
method aclear (line 270) | async def aclear(self, **kwargs: Any) -> None:
FILE: libs/core/langchain_core/callbacks/__init__.py
function __getattr__ (line 124) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 131) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/callbacks/base.py
class RetrieverManagerMixin (line 23) | class RetrieverManagerMixin:
method on_retriever_error (line 26) | def on_retriever_error(
method on_retriever_end (line 43) | def on_retriever_end(
class LLMManagerMixin (line 61) | class LLMManagerMixin:
method on_llm_new_token (line 64) | def on_llm_new_token(
method on_llm_end (line 89) | def on_llm_end(
method on_llm_error (line 108) | def on_llm_error(
class ChainManagerMixin (line 128) | class ChainManagerMixin:
method on_chain_end (line 131) | def on_chain_end(
method on_chain_error (line 148) | def on_chain_error(
method on_agent_action (line 165) | def on_agent_action(
method on_agent_finish (line 182) | def on_agent_finish(
class ToolManagerMixin (line 200) | class ToolManagerMixin:
method on_tool_end (line 203) | def on_tool_end(
method on_tool_error (line 220) | def on_tool_error(
class CallbackManagerMixin (line 238) | class CallbackManagerMixin:
method on_llm_start (line 241) | def on_llm_start(
method on_chat_model_start (line 270) | def on_chat_model_start(
method on_retriever_start (line 322) | def on_retriever_start(
method on_chain_start (line 345) | def on_chain_start(
method on_tool_start (line 368) | def on_tool_start(
class RunManagerMixin (line 394) | class RunManagerMixin:
method on_text (line 397) | def on_text(
method on_retry (line 414) | def on_retry(
method on_custom_event (line 431) | def on_custom_event(
class BaseCallbackHandler (line 455) | class BaseCallbackHandler(
method ignore_llm (line 472) | def ignore_llm(self) -> bool:
method ignore_retry (line 477) | def ignore_retry(self) -> bool:
method ignore_chain (line 482) | def ignore_chain(self) -> bool:
method ignore_agent (line 487) | def ignore_agent(self) -> bool:
method ignore_retriever (line 492) | def ignore_retriever(self) -> bool:
method ignore_chat_model (line 497) | def ignore_chat_model(self) -> bool:
method ignore_custom_event (line 502) | def ignore_custom_event(self) -> bool:
class AsyncCallbackHandler (line 507) | class AsyncCallbackHandler(BaseCallbackHandler):
method on_llm_start (line 510) | async def on_llm_start(
method on_chat_model_start (line 539) | async def on_chat_model_start(
method on_llm_new_token (line 591) | async def on_llm_new_token(
method on_llm_end (line 614) | async def on_llm_end(
method on_llm_error (line 633) | async def on_llm_error(
method on_chain_start (line 655) | async def on_chain_start(
method on_chain_end (line 678) | async def on_chain_end(
method on_chain_error (line 697) | async def on_chain_error(
method on_tool_start (line 716) | async def on_tool_start(
method on_tool_end (line 741) | async def on_tool_end(
method on_tool_error (line 760) | async def on_tool_error(
method on_text (line 779) | async def on_text(
method on_retry (line 798) | async def on_retry(
method on_agent_action (line 815) | async def on_agent_action(
method on_agent_finish (line 834) | async def on_agent_finish(
method on_retriever_start (line 853) | async def on_retriever_start(
method on_retriever_end (line 876) | async def on_retriever_end(
method on_retriever_error (line 895) | async def on_retriever_error(
method on_custom_event (line 914) | async def on_custom_event(
class BaseCallbackManager (line 938) | class BaseCallbackManager(CallbackManagerMixin):
method __init__ (line 941) | def __init__(
method copy (line 973) | def copy(self) -> Self:
method merge (line 985) | def merge(self, other: BaseCallbackManager) -> Self:
method is_async (line 1041) | def is_async(self) -> bool:
method add_handler (line 1045) | def add_handler(
method remove_handler (line 1061) | def remove_handler(self, handler: BaseCallbackHandler) -> None:
method set_handlers (line 1072) | def set_handlers(
method set_handler (line 1088) | def set_handler(
method add_tags (line 1101) | def add_tags(
method remove_tags (line 1119) | def remove_tags(self, tags: list[str]) -> None:
method add_metadata (line 1131) | def add_metadata(
method remove_metadata (line 1146) | def remove_metadata(self, keys: list[str]) -> None:
FILE: libs/core/langchain_core/callbacks/file.py
class FileCallbackHandler (line 21) | class FileCallbackHandler(BaseCallbackHandler):
method __init__ (line 60) | def __init__(
method __enter__ (line 81) | def __enter__(self) -> Self:
method __exit__ (line 96) | def __exit__(
method __del__ (line 112) | def __del__(self) -> None:
method close (line 116) | def close(self) -> None:
method _write (line 126) | def _write(
method on_chain_start (line 164) | def on_chain_start(
method on_chain_end (line 183) | def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
method on_agent_action (line 194) | def on_agent_action(
method on_tool_end (line 210) | def on_tool_end(
method on_text (line 237) | def on_text(
method on_agent_finish (line 254) | def on_agent_finish(
FILE: libs/core/langchain_core/callbacks/manager.py
function _get_debug (line 49) | def _get_debug() -> bool:
function trace_as_chain_group (line 54) | def trace_as_chain_group(
function atrace_as_chain_group (line 134) | async def atrace_as_chain_group(
function shielded (line 219) | def shielded(func: Func) -> Func:
function handle_event (line 254) | def handle_event(
function _run_coros (line 337) | def _run_coros(coros: list[Coroutine[Any, Any, Any]]) -> None:
function _ahandle_event_for_handler (line 366) | async def _ahandle_event_for_handler(
function ahandle_event (line 418) | async def ahandle_event(
class BaseRunManager (line 455) | class BaseRunManager(RunManagerMixin):
method __init__ (line 458) | def __init__(
method get_noop_manager (line 493) | def get_noop_manager(cls) -> Self:
class RunManager (line 511) | class RunManager(BaseRunManager):
method on_text (line 514) | def on_text(
method on_retry (line 538) | def on_retry(
class ParentRunManager (line 564) | class ParentRunManager(RunManager):
method get_child (line 567) | def get_child(self, tag: str | None = None) -> CallbackManager:
class AsyncRunManager (line 586) | class AsyncRunManager(BaseRunManager, ABC):
method get_sync (line 590) | def get_sync(self) -> RunManager:
method on_text (line 598) | async def on_text(
method on_retry (line 622) | async def on_retry(
class AsyncParentRunManager (line 648) | class AsyncParentRunManager(AsyncRunManager):
method get_child (line 651) | def get_child(self, tag: str | None = None) -> AsyncCallbackManager:
class CallbackManagerForLLMRun (line 670) | class CallbackManagerForLLMRun(RunManager, LLMManagerMixin):
method on_llm_new_token (line 673) | def on_llm_new_token(
method on_llm_end (line 702) | def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
method on_llm_error (line 723) | def on_llm_error(
class AsyncCallbackManagerForLLMRun (line 751) | class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
method get_sync (line 754) | def get_sync(self) -> CallbackManagerForLLMRun:
method on_llm_new_token (line 772) | async def on_llm_new_token(
method on_llm_end (line 802) | async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
method on_llm_error (line 824) | async def on_llm_error(
class CallbackManagerForChainRun (line 853) | class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
method on_chain_end (line 856) | def on_chain_end(self, outputs: dict[str, Any] | Any, **kwargs: Any) -...
method on_chain_error (line 877) | def on_chain_error(
method on_agent_action (line 902) | def on_agent_action(self, action: AgentAction, **kwargs: Any) -> None:
method on_agent_finish (line 922) | def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
class AsyncCallbackManagerForChainRun (line 943) | class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManage...
method get_sync (line 946) | def get_sync(self) -> CallbackManagerForChainRun:
method on_chain_end (line 964) | async def on_chain_end(self, outputs: dict[str, Any] | Any, **kwargs: ...
method on_chain_error (line 986) | async def on_chain_error(
method on_agent_action (line 1011) | async def on_agent_action(self, action: AgentAction, **kwargs: Any) ->...
method on_agent_finish (line 1031) | async def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) ->...
class CallbackManagerForToolRun (line 1052) | class CallbackManagerForToolRun(ParentRunManager, ToolManagerMixin):
method on_tool_end (line 1055) | def on_tool_end(
method on_tool_error (line 1080) | def on_tool_error(
class AsyncCallbackManagerForToolRun (line 1106) | class AsyncCallbackManagerForToolRun(AsyncParentRunManager, ToolManagerM...
method get_sync (line 1109) | def get_sync(self) -> CallbackManagerForToolRun:
method on_tool_end (line 1126) | async def on_tool_end(self, output: Any, **kwargs: Any) -> None:
method on_tool_error (line 1147) | async def on_tool_error(
class CallbackManagerForRetrieverRun (line 1173) | class CallbackManagerForRetrieverRun(ParentRunManager, RetrieverManagerM...
method on_retriever_end (line 1176) | def on_retriever_end(
method on_retriever_error (line 1201) | def on_retriever_error(
class AsyncCallbackManagerForRetrieverRun (line 1227) | class AsyncCallbackManagerForRetrieverRun(
method get_sync (line 1233) | def get_sync(self) -> CallbackManagerForRetrieverRun:
method on_retriever_end (line 1252) | async def on_retriever_end(
method on_retriever_error (line 1276) | async def on_retriever_error(
class CallbackManager (line 1302) | class CallbackManager(BaseCallbackManager):
method on_llm_start (line 1305) | def on_llm_start(
method on_chat_model_start (line 1356) | def on_chat_model_start(
method on_chain_start (line 1410) | def on_chain_start(
method on_tool_start (line 1456) | def on_tool_start(
method on_retriever_start (line 1516) | def on_retriever_start(
method on_custom_event (line 1563) | def on_custom_event(
method configure (line 1608) | def configure(
class CallbackManagerForChainGroup (line 1644) | class CallbackManagerForChainGroup(CallbackManager):
method __init__ (line 1647) | def __init__(
method copy (line 1676) | def copy(self) -> CallbackManagerForChainGroup:
method merge (line 1688) | def merge(
method on_chain_end (line 1748) | def on_chain_end(self, outputs: dict[str, Any] | Any, **kwargs: Any) -...
method on_chain_error (line 1759) | def on_chain_error(
class AsyncCallbackManager (line 1775) | class AsyncCallbackManager(BaseCallbackManager):
method is_async (line 1779) | def is_async(self) -> bool:
method on_llm_start (line 1783) | async def on_llm_start(
method on_chat_model_start (line 1871) | async def on_chat_model_start(
method on_chain_start (line 1942) | async def on_chain_start(
method on_tool_start (line 1988) | async def on_tool_start(
method on_custom_event (line 2035) | async def on_custom_event(
method on_retriever_start (line 2080) | async def on_retriever_start(
method configure (line 2128) | def configure(
class AsyncCallbackManagerForChainGroup (line 2164) | class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):
method __init__ (line 2167) | def __init__(
method copy (line 2194) | def copy(self) -> AsyncCallbackManagerForChainGroup:
method merge (line 2207) | def merge(
method on_chain_end (line 2269) | async def on_chain_end(self, outputs: dict[str, Any] | Any, **kwargs: ...
method on_chain_error (line 2279) | async def on_chain_error(
function _configure (line 2297) | def _configure(
function adispatch_custom_event (line 2485) | async def adispatch_custom_event(
function dispatch_custom_event (line 2610) | def dispatch_custom_event(
function _executor (line 2685) | def _executor() -> ThreadPoolExecutor:
FILE: libs/core/langchain_core/callbacks/stdout.py
class StdOutCallbackHandler (line 16) | class StdOutCallbackHandler(BaseCallbackHandler):
method __init__ (line 19) | def __init__(self, color: str | None = None) -> None:
method on_chain_start (line 28) | def on_chain_start(
method on_chain_end (line 47) | def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
method on_agent_action (line 57) | def on_agent_action(
method on_tool_end (line 70) | def on_tool_end(
method on_text (line 95) | def on_text(
method on_agent_finish (line 113) | def on_agent_finish(
FILE: libs/core/langchain_core/callbacks/streaming_stdout.py
class StreamingStdOutCallbackHandler (line 18) | class StreamingStdOutCallbackHandler(BaseCallbackHandler):
method on_llm_start (line 24) | def on_llm_start(
method on_chat_model_start (line 35) | def on_chat_model_start(
method on_llm_new_token (line 50) | def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
method on_llm_end (line 60) | def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
method on_llm_error (line 68) | def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
method on_chain_start (line 76) | def on_chain_start(
method on_chain_end (line 87) | def on_chain_end(self, outputs: dict[str, Any], **kwargs: Any) -> None:
method on_chain_error (line 95) | def on_chain_error(self, error: BaseException, **kwargs: Any) -> None:
method on_tool_start (line 103) | def on_tool_start(
method on_agent_action (line 114) | def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
method on_tool_end (line 122) | def on_tool_end(self, output: Any, **kwargs: Any) -> None:
method on_tool_error (line 130) | def on_tool_error(self, error: BaseException, **kwargs: Any) -> None:
method on_text (line 138) | def on_text(self, text: str, **kwargs: Any) -> None:
method on_agent_finish (line 146) | def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
FILE: libs/core/langchain_core/callbacks/usage.py
class UsageMetadataCallbackHandler (line 18) | class UsageMetadataCallbackHandler(BaseCallbackHandler):
method __init__ (line 51) | def __init__(self) -> None:
method __repr__ (line 58) | def __repr__(self) -> str:
method on_llm_end (line 62) | def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
function get_usage_metadata_callback (line 93) | def get_usage_metadata_callback(
FILE: libs/core/langchain_core/chat_history.py
class BaseChatMessageHistory (line 22) | class BaseChatMessageHistory(ABC):
method aget_messages (line 99) | async def aget_messages(self) -> list[BaseMessage]:
method add_user_message (line 112) | def add_user_message(self, message: HumanMessage | str) -> None:
method add_ai_message (line 130) | def add_ai_message(self, message: AIMessage | str) -> None:
method add_message (line 148) | def add_message(self, message: BaseMessage) -> None:
method add_messages (line 169) | def add_messages(self, messages: Sequence[BaseMessage]) -> None:
method aadd_messages (line 181) | async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None:
method clear (line 190) | def clear(self) -> None:
method aclear (line 193) | async def aclear(self) -> None:
method __str__ (line 197) | def __str__(self) -> str:
class InMemoryChatMessageHistory (line 202) | class InMemoryChatMessageHistory(BaseChatMessageHistory, BaseModel):
method aget_messages (line 211) | async def aget_messages(self) -> list[BaseMessage]:
method add_message (line 224) | def add_message(self, message: BaseMessage) -> None:
method aadd_messages (line 232) | async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None:
method clear (line 240) | def clear(self) -> None:
method aclear (line 244) | async def aclear(self) -> None:
FILE: libs/core/langchain_core/chat_loaders.py
class BaseChatLoader (line 9) | class BaseChatLoader(ABC):
method lazy_load (line 13) | def lazy_load(self) -> Iterator[ChatSession]:
method load (line 20) | def load(self) -> list[ChatSession]:
FILE: libs/core/langchain_core/chat_sessions.py
class ChatSession (line 9) | class ChatSession(TypedDict, total=False):
FILE: libs/core/langchain_core/cross_encoders.py
class BaseCrossEncoder (line 6) | class BaseCrossEncoder(ABC):
method score (line 10) | def score(self, text_pairs: list[tuple[str, str]]) -> list[float]:
FILE: libs/core/langchain_core/document_loaders/__init__.py
function __getattr__ (line 31) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 38) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/document_loaders/base.py
class BaseLoader (line 26) | class BaseLoader(ABC): # noqa: B024
method load (line 37) | def load(self) -> list[Document]:
method aload (line 45) | async def aload(self) -> list[Document]:
method load_and_split (line 53) | def load_and_split(
method lazy_load (line 91) | def lazy_load(self) -> Iterator[Document]:
method alazy_load (line 102) | async def alazy_load(self) -> AsyncIterator[Document]:
class BaseBlobParser (line 117) | class BaseBlobParser(ABC):
method lazy_parse (line 128) | def lazy_parse(self, blob: Blob) -> Iterator[Document]:
method parse (line 140) | def parse(self, blob: Blob) -> list[Document]:
FILE: libs/core/langchain_core/document_loaders/blob_loaders.py
class BlobLoader (line 19) | class BlobLoader(ABC):
method yield_blobs (line 27) | def yield_blobs(
FILE: libs/core/langchain_core/document_loaders/langsmith.py
class LangSmithLoader (line 17) | class LangSmithLoader(BaseLoader):
method __init__ (line 40) | def __init__(
method lazy_load (line 113) | def lazy_load(self) -> Iterator[Document]:
function _stringify (line 137) | def _stringify(x: str | dict[str, Any]) -> str:
FILE: libs/core/langchain_core/documents/__init__.py
function __getattr__ (line 47) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 54) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/documents/base.py
class BaseMedia (line 34) | class BaseMedia(Serializable):
class Blob (line 59) | class Blob(BaseMedia):
method source (line 137) | def source(self) -> str | None:
method check_blob_is_valid (line 151) | def check_blob_is_valid(cls, values: dict[str, Any]) -> Any:
method as_string (line 158) | def as_string(self) -> str:
method as_bytes (line 176) | def as_bytes(self) -> bytes:
method as_bytes_io (line 195) | def as_bytes_io(self) -> Generator[BytesIO | BufferedReader, None, None]:
method from_path (line 214) | def from_path(
method from_data (line 251) | def from_data(
method __repr__ (line 280) | def __repr__(self) -> str:
class Document (line 288) | class Document(BaseMedia):
method __init__ (line 311) | def __init__(self, page_content: str, **kwargs: Any) -> None:
method is_lc_serializable (line 318) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 323) | def get_lc_namespace(cls) -> list[str]:
method __str__ (line 331) | def __str__(self) -> str:
FILE: libs/core/langchain_core/documents/compressor.py
class BaseDocumentCompressor (line 19) | class BaseDocumentCompressor(BaseModel, ABC):
method compress_documents (line 37) | def compress_documents(
method acompress_documents (line 55) | async def acompress_documents(
FILE: libs/core/langchain_core/documents/transformers.py
class BaseDocumentTransformer (line 16) | class BaseDocumentTransformer(ABC):
method transform_documents (line 54) | def transform_documents(
method atransform_documents (line 66) | async def atransform_documents(
FILE: libs/core/langchain_core/embeddings/__init__.py
function __getattr__ (line 23) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 30) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/embeddings/embeddings.py
class Embeddings (line 8) | class Embeddings(ABC):
method embed_documents (line 37) | def embed_documents(self, texts: list[str]) -> list[list[float]]:
method embed_query (line 48) | def embed_query(self, text: str) -> list[float]:
method aembed_documents (line 58) | async def aembed_documents(self, texts: list[str]) -> list[list[float]]:
method aembed_query (line 69) | async def aembed_query(self, text: str) -> list[float]:
FILE: libs/core/langchain_core/embeddings/fake.py
class FakeEmbeddings (line 16) | class FakeEmbeddings(Embeddings, BaseModel):
method _get_embedding (line 58) | def _get_embedding(self) -> list[float]:
method embed_documents (line 62) | def embed_documents(self, texts: list[str]) -> list[list[float]]:
method embed_query (line 66) | def embed_query(self, text: str) -> list[float]:
class DeterministicFakeEmbedding (line 70) | class DeterministicFakeEmbedding(Embeddings, BaseModel):
method _get_embedding (line 113) | def _get_embedding(self, seed: int) -> list[float]:
method _get_seed (line 119) | def _get_seed(text: str) -> int:
method embed_documents (line 124) | def embed_documents(self, texts: list[str]) -> list[list[float]]:
method embed_query (line 128) | def embed_query(self, text: str) -> list[float]:
FILE: libs/core/langchain_core/env.py
function get_runtime_environment (line 10) | def get_runtime_environment() -> dict:
FILE: libs/core/langchain_core/example_selectors/__init__.py
function __getattr__ (line 39) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 46) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/example_selectors/base.py
class BaseExampleSelector (line 9) | class BaseExampleSelector(ABC):
method add_example (line 13) | def add_example(self, example: dict[str, str]) -> Any:
method aadd_example (line 24) | async def aadd_example(self, example: dict[str, str]) -> Any:
method select_examples (line 37) | def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
method aselect_examples (line 48) | async def aselect_examples(self, input_variables: dict[str, str]) -> l...
FILE: libs/core/langchain_core/example_selectors/length_based.py
function _get_length_based (line 13) | def _get_length_based(text: str) -> int:
class LengthBasedExampleSelector (line 17) | class LengthBasedExampleSelector(BaseExampleSelector, BaseModel):
method add_example (line 66) | def add_example(self, example: dict[str, str]) -> None:
method aadd_example (line 77) | async def aadd_example(self, example: dict[str, str]) -> None:
method post_init (line 87) | def post_init(self) -> Self:
method select_examples (line 95) | def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
method aselect_examples (line 118) | async def aselect_examples(self, input_variables: dict[str, str]) -> l...
FILE: libs/core/langchain_core/example_selectors/semantic_similarity.py
function sorted_values (line 18) | def sorted_values(values: dict[str, str]) -> list[Any]:
class _VectorStoreExampleSelector (line 31) | class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
method _example_to_text (line 52) | def _example_to_text(example: dict[str, str], input_keys: list[str] | ...
method _documents_to_examples (line 57) | def _documents_to_examples(self, documents: list[Document]) -> list[di...
method add_example (line 66) | def add_example(self, example: dict[str, str]) -> str:
method aadd_example (line 81) | async def aadd_example(self, example: dict[str, str]) -> str:
class SemanticSimilarityExampleSelector (line 97) | class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
method select_examples (line 100) | def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
method aselect_examples (line 118) | async def aselect_examples(self, input_variables: dict[str, str]) -> l...
method from_examples (line 137) | def from_examples(
method afrom_examples (line 181) | async def afrom_examples(
class MaxMarginalRelevanceExampleSelector (line 225) | class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
method select_examples (line 235) | def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
method aselect_examples (line 251) | async def aselect_examples(self, input_variables: dict[str, str]) -> l...
method from_examples (line 268) | def from_examples(
method afrom_examples (line 314) | async def afrom_examples(
FILE: libs/core/langchain_core/exceptions.py
class LangChainException (line 7) | class LangChainException(Exception): # noqa: N818
class TracerException (line 11) | class TracerException(LangChainException):
class OutputParserException (line 15) | class OutputParserException(ValueError, LangChainException): # noqa: N818
method __init__ (line 25) | def __init__(
class ContextOverflowError (line 68) | class ContextOverflowError(LangChainException):
class ErrorCode (line 76) | class ErrorCode(Enum):
function create_message (line 88) | def create_message(*, message: str, error_code: ErrorCode) -> str:
FILE: libs/core/langchain_core/globals.py
function set_verbose (line 18) | def set_verbose(value: bool) -> None: # noqa: FBT001
function get_verbose (line 28) | def get_verbose() -> bool:
function set_debug (line 37) | def set_debug(value: bool) -> None: # noqa: FBT001
function get_debug (line 47) | def get_debug() -> bool:
function set_llm_cache (line 56) | def set_llm_cache(value: Optional["BaseCache"]) -> None:
function get_llm_cache (line 66) | def get_llm_cache() -> Optional["BaseCache"]:
FILE: libs/core/langchain_core/indexing/__init__.py
function __getattr__ (line 45) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 52) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/indexing/api.py
function _hash_string_to_uuid (line 44) | def _hash_string_to_uuid(input_string: str) -> str:
function _warn_about_sha1 (line 55) | def _warn_about_sha1() -> None:
function _hash_string (line 73) | def _hash_string(
function _hash_nested_dict (line 83) | def _hash_nested_dict(
function _batch (line 91) | def _batch(size: int, iterable: Iterable[T]) -> Iterator[list[T]]:
function _abatch (line 101) | async def _abatch(size: int, iterable: AsyncIterable[T]) -> AsyncIterato...
function _get_source_id_assigner (line 116) | def _get_source_id_assigner(
function _deduplicate_in_order (line 133) | def _deduplicate_in_order(
class IndexingException (line 147) | class IndexingException(LangChainException):
function _calculate_hash (line 151) | def _calculate_hash(
function _get_document_with_hash (line 169) | def _get_document_with_hash(
class _HashedDocument (line 229) | class _HashedDocument:
method __init__ (line 230) | def __init__(self, *args: Any, **kwargs: Any) -> None:
function _delete (line 241) | def _delete(
class IndexingResult (line 277) | class IndexingResult(TypedDict):
function index (line 290) | def index(
function _to_async_iterator (line 601) | async def _to_async_iterator(iterator: Iterable[T]) -> AsyncIterator[T]:
function _adelete (line 607) | async def _adelete(
function aindex (line 629) | async def aindex(
FILE: libs/core/langchain_core/indexing/base.py
class RecordManager (line 22) | class RecordManager(ABC):
method __init__ (line 57) | def __init__(
method create_schema (line 69) | def create_schema(self) -> None:
method acreate_schema (line 73) | async def acreate_schema(self) -> None:
method get_time (line 77) | def get_time(self) -> float:
method aget_time (line 88) | async def aget_time(self) -> float:
method update (line 99) | def update(
method aupdate (line 127) | async def aupdate(
method exists (line 155) | def exists(self, keys: Sequence[str]) -> list[bool]:
method aexists (line 166) | async def aexists(self, keys: Sequence[str]) -> list[bool]:
method list_keys (line 177) | def list_keys(
method alist_keys (line 198) | async def alist_keys(
method delete_keys (line 219) | def delete_keys(self, keys: Sequence[str]) -> None:
method adelete_keys (line 227) | async def adelete_keys(self, keys: Sequence[str]) -> None:
class _Record (line 235) | class _Record(TypedDict):
class InMemoryRecordManager (line 240) | class InMemoryRecordManager(RecordManager):
method __init__ (line 243) | def __init__(self, namespace: str) -> None:
method create_schema (line 255) | def create_schema(self) -> None:
method acreate_schema (line 258) | async def acreate_schema(self) -> None:
method get_time (line 262) | def get_time(self) -> float:
method aget_time (line 266) | async def aget_time(self) -> float:
method update (line 269) | def update(
method aupdate (line 306) | async def aupdate(
method exists (line 330) | def exists(self, keys: Sequence[str]) -> list[bool]:
method aexists (line 341) | async def aexists(self, keys: Sequence[str]) -> list[bool]:
method list_keys (line 352) | def list_keys(
method alist_keys (line 388) | async def alist_keys(
method delete_keys (line 415) | def delete_keys(self, keys: Sequence[str]) -> None:
method adelete_keys (line 425) | async def adelete_keys(self, keys: Sequence[str]) -> None:
class UpsertResponse (line 434) | class UpsertResponse(TypedDict):
class DeleteResponse (line 460) | class DeleteResponse(TypedDict, total=False):
class DocumentIndex (line 497) | class DocumentIndex(BaseRetriever):
method upsert (line 514) | def upsert(self, items: Sequence[Document], /, **kwargs: Any) -> Upser...
method aupsert (line 535) | async def aupsert(
method delete (line 565) | def delete(self, ids: list[str] | None = None, **kwargs: Any) -> Delet...
method adelete (line 581) | async def adelete(
method get (line 605) | def get(
method aget (line 631) | async def aget(
FILE: libs/core/langchain_core/indexing/in_memory.py
class InMemoryDocumentIndex (line 19) | class InMemoryDocumentIndex(DocumentIndex):
method upsert (line 32) | def upsert(self, items: Sequence[Document], /, **kwargs: Any) -> Upser...
method delete (line 61) | def delete(self, ids: list[str] | None = None, **kwargs: Any) -> Delet...
method get (line 90) | def get(self, ids: Sequence[str], /, **kwargs: Any) -> list[Document]:
method _get_relevant_documents (line 94) | def _get_relevant_documents(
FILE: libs/core/langchain_core/language_models/__init__.py
function __getattr__ (line 108) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 115) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/language_models/_utils.py
function is_openai_data_block (line 17) | def is_openai_data_block(
class ParsedDataUri (line 83) | class ParsedDataUri(TypedDict):
function _parse_data_uri (line 89) | def _parse_data_uri(uri: str) -> ParsedDataUri | None:
function _normalize_messages (line 124) | def _normalize_messages(
function _ensure_message_copy (line 295) | def _ensure_message_copy(message: T, formatted_message: T) -> T:
function _update_content_block (line 304) | def _update_content_block(
function _update_message_content_to_blocks (line 318) | def _update_message_content_to_blocks(message: T, output_version: str) -...
FILE: libs/core/langchain_core/language_models/base.py
class LangSmithParams (line 49) | class LangSmithParams(TypedDict, total=False):
function get_tokenizer (line 77) | def get_tokenizer() -> Any:
function _get_token_ids_default_method (line 103) | def _get_token_ids_default_method(text: str) -> list[int]:
function _get_verbosity (line 137) | def _get_verbosity() -> bool:
class BaseLanguageModel (line 141) | class BaseLanguageModel(
method set_verbose (line 183) | def set_verbose(cls, verbose: bool | None) -> bool: # noqa: FBT001
method InputType (line 201) | def InputType(self) -> TypeAlias:
method generate_prompt (line 209) | def generate_prompt(
method agenerate_prompt (line 253) | async def agenerate_prompt(
method with_structured_output (line 296) | def with_structured_output(
method _get_ls_params (line 304) | def _get_ls_params(
method _get_ls_params_with_defaults (line 312) | def _get_ls_params_with_defaults(
method _identifying_params (line 321) | def _identifying_params(self) -> Mapping[str, Any]:
method get_token_ids (line 325) | def get_token_ids(self, text: str) -> list[int]:
method get_num_tokens (line 339) | def get_num_tokens(self, text: str) -> int:
method get_num_tokens_from_messages (line 356) | def get_num_tokens_from_messages(
FILE: libs/core/langchain_core/language_models/chat_models.py
function _generate_response_from_error (line 88) | def _generate_response_from_error(error: BaseException) -> list[ChatGene...
function _format_for_tracing (line 118) | def _format_for_tracing(messages: list[BaseMessage]) -> list[BaseMessage]:
function generate_from_stream (line 186) | def generate_from_stream(stream: Iterator[ChatGenerationChunk]) -> ChatR...
function agenerate_from_stream (line 215) | async def agenerate_from_stream(
function _format_ls_structured_output (line 231) | def _format_ls_structured_output(ls_structured_output_format: dict | Non...
class BaseChatModel (line 250) | class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
method _resolve_model_profile (line 364) | def _resolve_model_profile(self) -> ModelProfile | None:
method _set_model_profile (line 384) | def _set_model_profile(self) -> Self:
method _check_profile_keys (line 404) | def _check_profile_keys(self) -> Self:
method _serialized (line 413) | def _serialized(self) -> dict[str, Any]:
method OutputType (line 423) | def OutputType(self) -> Any:
method _convert_input (line 427) | def _convert_input(self, model_input: LanguageModelInput) -> PromptValue:
method invoke (line 441) | def invoke(
method ainvoke (line 468) | async def ainvoke(
method _should_stream (line 491) | def _should_stream(
method stream (line 532) | def stream(
method astream (line 658) | async def astream(
method _combine_llm_outputs (line 789) | def _combine_llm_outputs(self, _llm_outputs: list[dict | None], /) -> ...
method _convert_cached_generations (line 792) | def _convert_cached_generations(self, cache_val: list) -> list[ChatGen...
method _get_invocation_params (line 831) | def _get_invocation_params(
method _get_ls_params (line 840) | def _get_ls_params(
method _get_ls_params_with_defaults (line 882) | def _get_ls_params_with_defaults(
method _get_llm_string (line 892) | def _get_llm_string(self, stop: list[str] | None = None, **kwargs: Any...
method generate (line 906) | def generate(
method agenerate (line 1029) | async def agenerate(
method generate_prompt (line 1177) | def generate_prompt(
method agenerate_prompt (line 1188) | async def agenerate_prompt(
method _generate_with_cache (line 1200) | def _generate_with_cache(
method _agenerate_with_cache (line 1326) | async def _agenerate_with_cache(
method _generate (line 1453) | def _generate(
method _agenerate (line 1472) | async def _agenerate(
method _stream (line 1499) | def _stream(
method _astream (line 1519) | async def _astream(
method _call_async (line 1557) | async def _call_async(
method _llm_type (line 1575) | def _llm_type(self) -> str:
method dict (line 1579) | def dict(self, **kwargs: Any) -> dict:
method bind_tools (line 1585) | def bind_tools(
method with_structured_output (line 1604) | def with_structured_output(
class SimpleChatModel (line 1788) | class SimpleChatModel(BaseChatModel):
method _generate (line 1797) | def _generate(
method _call (line 1810) | def _call(
method _agenerate (line 1819) | async def _agenerate(
function _gen_info_and_msg_metadata (line 1836) | def _gen_info_and_msg_metadata(
function _cleanup_llm_representation (line 1848) | def _cleanup_llm_representation(serialized: Any, depth: int) -> None:
FILE: libs/core/langchain_core/language_models/fake.py
class FakeListLLM (line 19) | class FakeListLLM(LLM):
method _llm_type (line 39) | def _llm_type(self) -> str:
method _call (line 44) | def _call(
method _acall (line 60) | async def _acall(
method _identifying_params (line 77) | def _identifying_params(self) -> Mapping[str, Any]:
class FakeListLLMError (line 81) | class FakeListLLMError(Exception):
class FakeStreamingListLLM (line 85) | class FakeStreamingListLLM(FakeListLLM):
method stream (line 98) | def stream(
method astream (line 119) | async def astream(
FILE: libs/core/langchain_core/language_models/fake_chat_models.py
class FakeMessagesListChatModel (line 21) | class FakeMessagesListChatModel(BaseChatModel):
method _generate (line 32) | def _generate(
method _llm_type (line 51) | def _llm_type(self) -> str:
class FakeListChatModelError (line 55) | class FakeListChatModelError(Exception):
class FakeListChatModel (line 59) | class FakeListChatModel(SimpleChatModel):
method _llm_type (line 72) | def _llm_type(self) -> str:
method _call (line 76) | def _call(
method _stream (line 95) | def _stream(
method _astream (line 124) | async def _astream(
method _identifying_params (line 153) | def _identifying_params(self) -> dict[str, Any]:
method batch (line 158) | def batch(
method abatch (line 174) | async def abatch(
class FakeChatModel (line 192) | class FakeChatModel(SimpleChatModel):
method _call (line 196) | def _call(
method _agenerate (line 206) | async def _agenerate(
method _llm_type (line 219) | def _llm_type(self) -> str:
method _identifying_params (line 223) | def _identifying_params(self) -> dict[str, Any]:
class GenericFakeChatModel (line 227) | class GenericFakeChatModel(BaseChatModel):
method _generate (line 254) | def _generate(
method _stream (line 266) | def _stream(
method _llm_type (line 370) | def _llm_type(self) -> str:
class ParrotFakeChatModel (line 374) | class ParrotFakeChatModel(BaseChatModel):
method _generate (line 382) | def _generate(
method _llm_type (line 395) | def _llm_type(self) -> str:
FILE: libs/core/langchain_core/language_models/llms.py
function _log_error_once (line 68) | def _log_error_once(msg: str) -> None:
function create_base_retry_decorator (line 73) | def create_base_retry_decorator(
function _resolve_cache (line 131) | def _resolve_cache(*, cache: BaseCache | bool | None) -> BaseCache | None:
function get_prompts (line 155) | def get_prompts(
function aget_prompts (line 191) | async def aget_prompts(
function update_cache (line 226) | def update_cache(
function aupdate_cache (line 259) | async def aupdate_cache(
class BaseLLM (line 292) | class BaseLLM(BaseLanguageModel[str], ABC):
method _serialized (line 303) | def _serialized(self) -> dict[str, Any]:
method OutputType (line 313) | def OutputType(self) -> type[str]:
method _convert_input (line 317) | def _convert_input(self, model_input: LanguageModelInput) -> PromptValue:
method _get_ls_params (line 330) | def _get_ls_params(
method invoke (line 370) | def invoke(
method ainvoke (line 395) | async def ainvoke(
method batch (line 417) | def batch(
method abatch (line 464) | async def abatch(
method stream (line 510) | def stream(
method astream (line 577) | async def astream(
method _generate (line 650) | def _generate(
method _agenerate (line 673) | async def _agenerate(
method _stream (line 704) | def _stream(
method _astream (line 735) | async def _astream(
method generate_prompt (line 783) | def generate_prompt(
method agenerate_prompt (line 794) | async def agenerate_prompt(
method _generate_helper (line 806) | def _generate_helper(
method generate (line 842) | def generate(
method _get_run_ids_list (line 1059) | def _get_run_ids_list(
method _agenerate_helper (line 1074) | async def _agenerate_helper(
method agenerate (line 1117) | async def agenerate(
method _call_async (line 1331) | async def _call_async(
method __str__ (line 1352) | def __str__(self) -> str:
method _llm_type (line 1359) | def _llm_type(self) -> str:
method dict (line 1363) | def dict(self, **kwargs: Any) -> dict:
method save (line 1369) | def save(self, file_path: Path | str) -> None:
class LLM (line 1403) | class LLM(BaseLLM):
method _call (line 1429) | def _call(
method _acall (line 1457) | async def _acall(
method _generate (line 1495) | def _generate(
method _agenerate (line 1514) | async def _agenerate(
FILE: libs/core/langchain_core/language_models/model_profile.py
class ModelProfile (line 13) | class ModelProfile(TypedDict, total=False):
function _warn_unknown_profile_keys (line 128) | def _warn_unknown_profile_keys(profile: ModelProfile) -> None:
FILE: libs/core/langchain_core/load/__init__.py
function __getattr__ (line 36) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 43) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/load/_validation.py
function _needs_escaping (line 36) | def _needs_escaping(obj: dict[str, Any]) -> bool:
function _escape_dict (line 47) | def _escape_dict(obj: dict[str, Any]) -> dict[str, Any]:
function _is_escaped_dict (line 58) | def _is_escaped_dict(obj: dict[str, Any]) -> bool:
function _serialize_value (line 69) | def _serialize_value(obj: Any) -> Any:
function _is_lc_secret (line 105) | def _is_lc_secret(obj: Any) -> bool:
function _serialize_lc_object (line 117) | def _serialize_lc_object(obj: Any) -> dict[str, Any]:
function _unescape_value (line 148) | def _unescape_value(obj: Any) -> Any:
FILE: libs/core/langchain_core/load/dump.py
function default (line 29) | def default(obj: Any) -> Any:
function _dump_pydantic_models (line 43) | def _dump_pydantic_models(obj: Any) -> Any:
function dumps (line 70) | def dumps(obj: Any, *, pretty: bool = False, **kwargs: Any) -> str:
function dumpd (line 105) | def dumpd(obj: Any) -> Any:
FILE: libs/core/langchain_core/load/load.py
function _get_default_allowed_class_paths (line 148) | def _get_default_allowed_class_paths(
function _block_jinja2_templates (line 176) | def _block_jinja2_templates(
function default_init_validator (line 207) | def default_init_validator(
function _compute_allowed_class_paths (line 245) | def _compute_allowed_class_paths(
class Reviver (line 292) | class Reviver:
method __init__ (line 301) | def __init__(
method __call__ (line 392) | def __call__(self, value: dict[str, Any]) -> Any:
function loads (line 505) | def loads(
function load (line 595) | def load(
FILE: libs/core/langchain_core/load/serializable.py
class BaseSerialized (line 20) | class BaseSerialized(TypedDict):
class SerializedConstructor (line 33) | class SerializedConstructor(BaseSerialized):
class SerializedSecret (line 42) | class SerializedSecret(BaseSerialized):
class SerializedNotImplemented (line 49) | class SerializedNotImplemented(BaseSerialized):
function try_neq_default (line 58) | def try_neq_default(value: Any, key: str, model: BaseModel) -> bool:
function _try_neq_default (line 73) | def _try_neq_default(value: Any, field: FieldInfo) -> bool:
class Serializable (line 88) | class Serializable(BaseModel, ABC):
method __init__ (line 116) | def __init__(self, *args: Any, **kwargs: Any) -> None:
method is_lc_serializable (line 121) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 134) | def get_lc_namespace(cls) -> list[str]:
method lc_secrets (line 160) | def lc_secrets(self) -> dict[str, str]:
method lc_attributes (line 168) | def lc_attributes(self) -> dict:
method lc_id (line 178) | def lc_id(cls) -> list[str]:
method __repr_args__ (line 202) | def __repr_args__(self) -> Any:
method to_json (line 209) | def to_json(self) -> SerializedConstructor | SerializedNotImplemented:
method to_json_not_implemented (line 287) | def to_json_not_implemented(self) -> SerializedNotImplemented:
function _is_field_useful (line 296) | def _is_field_useful(inst: Serializable, key: str, value: Any) -> bool:
function _replace_secrets (line 341) | def _replace_secrets(
function to_json_not_implemented (line 362) | def to_json_not_implemented(obj: object) -> SerializedNotImplemented:
FILE: libs/core/langchain_core/messages/__init__.py
function __getattr__ (line 190) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 197) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/messages/ai.py
class InputTokenDetails (line 38) | class InputTokenDetails(TypedDict, total=False):
class OutputTokenDetails (line 74) | class OutputTokenDetails(TypedDict, total=False):
class UsageMetadata (line 104) | class UsageMetadata(TypedDict):
class AIMessage (line 160) | class AIMessage(BaseMessage):
method __init__ (line 186) | def __init__(
method __init__ (line 193) | def __init__(
method __init__ (line 200) | def __init__(
method lc_attributes (line 231) | def lc_attributes(self) -> dict:
method content_blocks (line 243) | def content_blocks(self) -> list[types.ContentBlock]:
method _backwards_compat_tool_calls (line 304) | def _backwards_compat_tool_calls(cls, values: dict) -> Any:
method pretty_repr (line 349) | def pretty_repr(self, html: bool = False) -> str:
class AIMessageChunk (line 413) | class AIMessageChunk(AIMessage, BaseMessageChunk):
method lc_attributes (line 434) | def lc_attributes(self) -> dict:
method content_blocks (line 441) | def content_blocks(self) -> list[types.ContentBlock]:
method init_tool_calls (line 495) | def init_tool_calls(self) -> Self:
method init_server_tool_calls (line 590) | def init_server_tool_calls(self) -> Self:
method __add__ (line 619) | def __add__(self, other: "AIMessageChunk") -> "AIMessageChunk": ...
method __add__ (line 622) | def __add__(self, other: Sequence["AIMessageChunk"]) -> "AIMessageChun...
method __add__ (line 625) | def __add__(self, other: Any) -> BaseMessageChunk: ...
method __add__ (line 628) | def __add__(self, other: Any) -> BaseMessageChunk:
function add_ai_message_chunks (line 638) | def add_ai_message_chunks(
function add_usage (line 721) | def add_usage(left: UsageMetadata | None, right: UsageMetadata | None) -...
function subtract_usage (line 780) | def subtract_usage(
FILE: libs/core/langchain_core/messages/base.py
function _extract_reasoning_from_additional_kwargs (line 24) | def _extract_reasoning_from_additional_kwargs(
class TextAccessor (line 47) | class TextAccessor(str):
method __new__ (line 64) | def __new__(cls, value: str) -> Self:
method __call__ (line 68) | def __call__(self) -> str:
class BaseMessage (line 93) | class BaseMessage(Serializable):
method __init__ (line 147) | def __init__(
method __init__ (line 154) | def __init__(
method __init__ (line 161) | def __init__(
method is_lc_serializable (line 182) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 191) | def get_lc_namespace(cls) -> list[str]:
method content_blocks (line 200) | def content_blocks(self) -> list[types.ContentBlock]:
method text (line 263) | def text(self) -> TextAccessor:
method __add__ (line 294) | def __add__(self, other: Any) -> ChatPromptTemplate:
method pretty_repr (line 309) | def pretty_repr(
method pretty_print (line 344) | def pretty_print(self) -> None:
function merge_content (line 366) | def merge_content(
class BaseMessageChunk (line 409) | class BaseMessageChunk(BaseMessage):
method __add__ (line 412) | def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[ove...
function message_to_dict (line 474) | def message_to_dict(message: BaseMessage) -> dict:
function messages_to_dict (line 488) | def messages_to_dict(messages: Sequence[BaseMessage]) -> list[dict]:
function get_msg_title_repr (line 501) | def get_msg_title_repr(title: str, *, bold: bool = False) -> str:
FILE: libs/core/langchain_core/messages/block_translators/__init__.py
function register_translator (line 39) | def register_translator(
function get_translator (line 57) | def get_translator(
function _register_translators (line 73) | def _register_translators() -> None:
FILE: libs/core/langchain_core/messages/block_translators/anthropic.py
function _populate_extras (line 11) | def _populate_extras(
function _convert_to_v1_from_anthropic_input (line 29) | def _convert_to_v1_from_anthropic_input(
function _convert_citation_to_v1 (line 142) | def _convert_citation_to_v1(citation: dict[str, Any]) -> types.Annotation:
function _convert_to_v1_from_anthropic (line 198) | def _convert_to_v1_from_anthropic(message: AIMessage) -> list[types.Cont...
function translate_content (line 463) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 475) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_anthropic_translator (line 487) | def _register_anthropic_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/bedrock.py
function _convert_to_v1_from_bedrock (line 10) | def _convert_to_v1_from_bedrock(message: AIMessage) -> list[types.Conten...
function _convert_to_v1_from_bedrock_chunk (line 35) | def _convert_to_v1_from_bedrock_chunk(
function translate_content (line 67) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 81) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_bedrock_translator (line 96) | def _register_bedrock_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/bedrock_converse.py
function _bytes_to_b64_str (line 11) | def _bytes_to_b64_str(bytes_: bytes) -> str:
function _populate_extras (line 15) | def _populate_extras(
function _convert_to_v1_from_converse_input (line 33) | def _convert_to_v1_from_converse_input(
function _convert_citation_to_v1 (line 128) | def _convert_citation_to_v1(citation: dict[str, Any]) -> types.Annotation:
function _convert_to_v1_from_converse (line 152) | def _convert_to_v1_from_converse(message: AIMessage) -> list[types.Conte...
function translate_content (line 283) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 295) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_bedrock_converse_translator (line 307) | def _register_bedrock_converse_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/google_genai.py
function _bytes_to_b64_str (line 20) | def _bytes_to_b64_str(bytes_: bytes) -> str:
function translate_grounding_metadata_to_citations (line 25) | def translate_grounding_metadata_to_citations(
function _convert_to_v1_from_genai_input (line 122) | def _convert_to_v1_from_genai_input(
function _convert_to_v1_from_genai (line 299) | def _convert_to_v1_from_genai(message: AIMessage) -> list[types.ContentB...
function translate_content (line 530) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 542) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_google_genai_translator (line 554) | def _register_google_genai_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/google_vertexai.py
function _register_google_vertexai_translator (line 9) | def _register_google_vertexai_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/groq.py
function _populate_extras (line 12) | def _populate_extras(
function _parse_code_json (line 30) | def _parse_code_json(s: str) -> dict:
function _convert_to_v1_from_groq (line 51) | def _convert_to_v1_from_groq(message: AIMessage) -> list[types.ContentBl...
function translate_content (line 121) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 133) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_groq_translator (line 145) | def _register_groq_translator() -> None:
FILE: libs/core/langchain_core/messages/block_translators/langchain_v0.py
function _convert_v0_multimodal_input_to_v1 (line 8) | def _convert_v0_multimodal_input_to_v1(
function _convert_legacy_v0_content_block_to_v1 (line 46) | def _convert_legacy_v0_content_block_to_v1(
FILE: libs/core/langchain_core/messages/block_translators/openai.py
function convert_to_openai_image_block (line 22) | def convert_to_openai_image_block(block: dict[str, Any]) -> dict:
function convert_to_openai_data_block (line 58) | def convert_to_openai_data_block(
function _convert_to_v1_from_chat_completions (line 154) | def _convert_to_v1_from_chat_completions(
function _convert_to_v1_from_chat_completions_input (line 178) | def _convert_to_v1_from_chat_completions_input(
function _convert_to_v1_from_chat_completions_chunk (line 226) | def _convert_to_v1_from_chat_completions_chunk(
function _convert_from_v1_to_chat_completions (line 263) | def _convert_from_v1_to_chat_completions(message: AIMessage) -> AIMessage:
function _convert_from_v03_ai_message (line 288) | def _convert_from_v03_ai_message(message: AIMessage) -> AIMessage:
function _convert_openai_format_to_data_block (line 423) | def _convert_openai_format_to_data_block(
function _convert_annotation_to_v1 (line 557) | def _convert_annotation_to_v1(annotation: dict[str, Any]) -> types.Annot...
function _explode_reasoning (line 610) | def _explode_reasoning(block: dict[str, Any]) -> Iterator[types.Reasonin...
function _convert_to_v1_from_responses (line 652) | def _convert_to_v1_from_responses(message: AIMessage) -> list[types.Cont...
function translate_content (line 1043) | def translate_content(message: AIMessage) -> list[types.ContentBlock]:
function translate_content_chunk (line 1058) | def translate_content_chunk(message: AIMessageChunk) -> list[types.Conte...
function _register_openai_translator (line 1073) | def _register_openai_translator() -> None:
FILE: libs/core/langchain_core/messages/chat.py
class ChatMessage (line 15) | class ChatMessage(BaseMessage):
class ChatMessageChunk (line 25) | class ChatMessageChunk(ChatMessage, BaseMessageChunk):
method __add__ (line 35) | def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[ove...
FILE: libs/core/langchain_core/messages/content.py
class Citation (line 126) | class Citation(TypedDict):
class NonStandardAnnotation (line 184) | class NonStandardAnnotation(TypedDict):
class TextContentBlock (line 207) | class TextContentBlock(TypedDict):
class ToolCall (line 247) | class ToolCall(TypedDict):
class ToolCallChunk (line 291) | class ToolCallChunk(TypedDict):
class InvalidToolCall (line 336) | class InvalidToolCall(TypedDict):
class ServerToolCall (line 372) | class ServerToolCall(TypedDict):
class ServerToolCallChunk (line 397) | class ServerToolCallChunk(TypedDict):
class ServerToolResult (line 425) | class ServerToolResult(TypedDict):
class ReasoningContentBlock (line 456) | class ReasoningContentBlock(TypedDict):
class ImageContentBlock (line 498) | class ImageContentBlock(TypedDict):
class VideoContentBlock (line 549) | class VideoContentBlock(TypedDict):
class AudioContentBlock (line 600) | class AudioContentBlock(TypedDict):
class PlainTextContentBlock (line 651) | class PlainTextContentBlock(TypedDict):
class FileContentBlock (line 721) | class FileContentBlock(TypedDict):
class NonStandardContentBlock (line 790) | class NonStandardContentBlock(TypedDict):
function _get_data_content_block_types (line 884) | def _get_data_content_block_types() -> tuple[str, ...]:
function is_data_content_block (line 908) | def is_data_content_block(block: dict) -> bool:
function create_text_block (line 950) | def create_text_block(
function create_image_block (line 995) | def create_image_block(
function create_video_block (line 1057) | def create_video_block(
function create_audio_block (line 1123) | def create_audio_block(
function create_file_block (line 1189) | def create_file_block(
function create_plaintext_block (line 1255) | def create_plaintext_block(
function create_tool_call (line 1318) | def create_tool_call(
function create_reasoning_block (line 1363) | def create_reasoning_block(
function create_citation (line 1404) | def create_citation(
function create_non_standard_block (line 1454) | def create_non_standard_block(
FILE: libs/core/langchain_core/messages/function.py
class FunctionMessage (line 15) | class FunctionMessage(BaseMessage):
class FunctionMessageChunk (line 34) | class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
method __add__ (line 44) | def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[ove...
FILE: libs/core/langchain_core/messages/human.py
class HumanMessage (line 9) | class HumanMessage(BaseMessage):
method __init__ (line 33) | def __init__(
method __init__ (line 40) | def __init__(
method __init__ (line 47) | def __init__(
class HumanMessageChunk (line 63) | class HumanMessageChunk(HumanMessage, BaseMessageChunk):
FILE: libs/core/langchain_core/messages/modifier.py
class RemoveMessage (line 8) | class RemoveMessage(BaseMessage):
method __init__ (line 14) | def __init__(
FILE: libs/core/langchain_core/messages/system.py
class SystemMessage (line 9) | class SystemMessage(BaseMessage):
method __init__ (line 33) | def __init__(
method __init__ (line 40) | def __init__(
method __init__ (line 47) | def __init__(
class SystemMessageChunk (line 63) | class SystemMessageChunk(SystemMessage, BaseMessageChunk):
FILE: libs/core/langchain_core/messages/tool.py
class ToolOutputMixin (line 16) | class ToolOutputMixin:
class ToolMessage (line 26) | class ToolMessage(BaseMessage, ToolOutputMixin):
method coerce_args (line 92) | def coerce_args(cls, values: dict) -> dict:
method __init__ (line 136) | def __init__(
method __init__ (line 143) | def __init__(
method __init__ (line 150) | def __init__(
class ToolMessageChunk (line 174) | class ToolMessageChunk(ToolMessage, BaseMessageChunk):
method __add__ (line 183) | def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[ove...
class ToolCall (line 206) | class ToolCall(TypedDict):
function tool_call (line 242) | def tool_call(
class ToolCallChunk (line 261) | class ToolCallChunk(TypedDict):
function tool_call_chunk (line 303) | def tool_call_chunk(
function invalid_tool_call (line 326) | def invalid_tool_call(
function default_tool_parser (line 349) | def default_tool_parser(
function default_tool_chunk_parser (line 386) | def default_tool_chunk_parser(raw_tool_calls: list[dict]) -> list[ToolCa...
function _merge_status (line 413) | def _merge_status(
FILE: libs/core/langchain_core/messages/utils.py
function _get_type (line 68) | def _get_type(v: Any) -> str:
function _has_base64_data (line 104) | def _has_base64_data(block: dict) -> bool:
function _truncate (line 135) | def _truncate(text: str, max_len: int = _XML_CONTENT_BLOCK_MAX_LEN) -> str:
function _format_content_block_xml (line 142) | def _format_content_block_xml(block: dict) -> str | None:
function _get_message_type_str (line 247) | def _get_message_type_str(
function get_buffer_string (line 287) | def get_buffer_string(
function _message_from_dict (line 510) | def _message_from_dict(message: dict) -> BaseMessage:
function messages_from_dict (line 542) | def messages_from_dict(messages: Sequence[dict]) -> list[BaseMessage]:
function message_chunk_to_message (line 555) | def message_chunk_to_message(chunk: BaseMessage) -> BaseMessage:
function _create_message_from_message_type (line 584) | def _create_message_from_message_type(
function _convert_to_message (line 675) | def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage:
function convert_to_messages (line 735) | def convert_to_messages(
class _RunnableSupportCallable (line 759) | class _RunnableSupportCallable(Protocol[_P, _R_co]):
method __call__ (line 761) | def __call__(
method __call__ (line 769) | def __call__(
method __call__ (line 776) | def __call__(
function _runnable_support (line 784) | def _runnable_support(
function filter_messages (line 806) | def filter_messages(
function merge_message_runs (line 951) | def merge_message_runs(
function trim_messages (line 1082) | def trim_messages(
function convert_to_openai_messages (line 1478) | def convert_to_openai_messages(
function convert_to_openai_messages (line 1488) | def convert_to_openai_messages(
function convert_to_openai_messages (line 1497) | def convert_to_openai_messages(
function _first_max_tokens (line 1912) | def _first_max_tokens(
function _last_max_tokens (line 2028) | def _last_max_tokens(
function _msg_to_chunk (line 2094) | def _msg_to_chunk(message: BaseMessage) -> BaseMessageChunk:
function _chunk_to_msg (line 2110) | def _chunk_to_msg(chunk: BaseMessageChunk) -> BaseMessage:
function _default_text_splitter (line 2131) | def _default_text_splitter(text: str) -> list[str]:
function _is_message_type (line 2136) | def _is_message_type(
function _bytes_to_b64_str (line 2147) | def _bytes_to_b64_str(bytes_: bytes) -> str:
function _get_message_openai_role (line 2151) | def _get_message_openai_role(message: BaseMessage) -> str:
function _convert_to_openai_tool_calls (line 2172) | def _convert_to_openai_tool_calls(tool_calls: list[ToolCall]) -> list[di...
function count_tokens_approximately (line 2186) | def count_tokens_approximately(
function _approximate_token_counter (line 2346) | def _approximate_token_counter(messages: Sequence[BaseMessage]) -> int:
FILE: libs/core/langchain_core/output_parsers/__init__.py
function __getattr__ (line 93) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 100) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/output_parsers/base.py
class BaseLLMOutputParser (line 30) | class BaseLLMOutputParser(ABC, Generic[T]):
method parse_result (line 34) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
method aparse_result (line 50) | async def aparse_result(
class BaseGenerationOutputParser (line 70) | class BaseGenerationOutputParser(
method InputType (line 77) | def InputType(self) -> Any:
method OutputType (line 83) | def OutputType(self) -> type[T]:
method invoke (line 90) | def invoke(
method ainvoke (line 113) | async def ainvoke(
class BaseOutputParser (line 136) | class BaseOutputParser(
method InputType (line 173) | def InputType(self) -> Any:
method OutputType (line 179) | def OutputType(self) -> type[T]:
method invoke (line 200) | def invoke(
method ainvoke (line 223) | async def ainvoke(
method parse_result (line 246) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
method parse (line 267) | def parse(self, text: str) -> T:
method aparse_result (line 277) | async def aparse_result(
method aparse (line 299) | async def aparse(self, text: str) -> T:
method parse_with_prompt (line 311) | def parse_with_prompt(
method get_format_instructions (line 330) | def get_format_instructions(self) -> str:
method _type (line 335) | def _type(self) -> str:
method dict (line 343) | def dict(self, **kwargs: Any) -> dict:
FILE: libs/core/langchain_core/output_parsers/json.py
class JsonOutputParser (line 31) | class JsonOutputParser(BaseCumulativeTransformOutputParser[Any]):
method _diff (line 51) | def _diff(self, prev: Any | None, next: Any) -> Any:
method _get_schema (line 55) | def _get_schema(pydantic_object: type[TBaseModel]) -> dict[str, Any]:
method parse_result (line 61) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
method parse (line 93) | def parse(self, text: str) -> Any:
method get_format_instructions (line 104) | def get_format_instructions(self) -> str:
method _type (line 126) | def _type(self) -> str:
FILE: libs/core/langchain_core/output_parsers/list.py
function droplastn (line 23) | def droplastn(
class ListOutputParser (line 43) | class ListOutputParser(BaseTransformOutputParser[list[str]]):
method _type (line 47) | def _type(self) -> str:
method parse (line 51) | def parse(self, text: str) -> list[str]:
method parse_iter (line 61) | def parse_iter(self, text: str) -> Iterator[re.Match]:
method _transform (line 73) | def _transform(self, input: Iterator[str | BaseMessage]) -> Iterator[l...
method _atransform (line 105) | async def _atransform(
class CommaSeparatedListOutputParser (line 139) | class CommaSeparatedListOutputParser(ListOutputParser):
method is_lc_serializable (line 143) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 148) | def get_lc_namespace(cls) -> list[str]:
method get_format_instructions (line 157) | def get_format_instructions(self) -> str:
method parse (line 165) | def parse(self, text: str) -> list[str]:
method _type (line 184) | def _type(self) -> str:
class NumberedListOutputParser (line 188) | class NumberedListOutputParser(ListOutputParser):
method get_format_instructions (line 195) | def get_format_instructions(self) -> str:
method parse (line 201) | def parse(self, text: str) -> list[str]:
method parse_iter (line 213) | def parse_iter(self, text: str) -> Iterator[re.Match]:
method _type (line 217) | def _type(self) -> str:
class MarkdownListOutputParser (line 221) | class MarkdownListOutputParser(ListOutputParser):
method get_format_instructions (line 228) | def get_format_instructions(self) -> str:
method parse (line 232) | def parse(self, text: str) -> list[str]:
method parse_iter (line 244) | def parse_iter(self, text: str) -> Iterator[re.Match]:
method _type (line 248) | def _type(self) -> str:
FILE: libs/core/langchain_core/output_parsers/openai_functions.py
class OutputFunctionsParser (line 22) | class OutputFunctionsParser(BaseGenerationOutputParser[Any]):
method parse_result (line 29) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
class JsonOutputFunctionsParser (line 58) | class JsonOutputFunctionsParser(BaseCumulativeTransformOutputParser[Any]):
method _type (line 73) | def _type(self) -> str:
method _diff (line 77) | def _diff(self, prev: Any | None, next: Any) -> Any:
method parse_result (line 80) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
method parse (line 145) | def parse(self, text: str) -> Any:
class JsonKeyOutputFunctionsParser (line 157) | class JsonKeyOutputFunctionsParser(JsonOutputFunctionsParser):
method parse_result (line 163) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
class PydanticOutputFunctionsParser (line 179) | class PydanticOutputFunctionsParser(OutputFunctionsParser):
method validate_schema (line 231) | def validate_schema(cls, values: dict[str, Any]) -> Any:
method parse_result (line 259) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
class PydanticAttrOutputFunctionsParser (line 295) | class PydanticAttrOutputFunctionsParser(PydanticOutputFunctionsParser):
method parse_result (line 302) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
FILE: libs/core/langchain_core/output_parsers/openai_tools.py
function parse_tool_call (line 27) | def parse_tool_call(
function make_invalid_tool_call (line 81) | def make_invalid_tool_call(
function parse_tool_calls (line 102) | def parse_tool_calls(
class JsonOutputToolsParser (line 140) | class JsonOutputToolsParser(BaseCumulativeTransformOutputParser[Any]):
method parse_result (line 166) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
method parse (line 213) | def parse(self, text: str) -> Any:
class JsonOutputKeyToolsParser (line 225) | class JsonOutputKeyToolsParser(JsonOutputToolsParser):
method parse_result (line 231) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
class PydanticToolsParser (line 306) | class PydanticToolsParser(JsonOutputToolsParser):
method parse_result (line 314) | def parse_result(self, result: list[Generation], *, partial: bool = Fa...
FILE: libs/core/langchain_core/output_parsers/pydantic.py
class PydanticOutputParser (line 19) | class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]):
method _parse_obj (line 25) | def _parse_obj(self, obj: dict) -> TBaseModel:
method _parser_exception (line 37) | def _parser_exception(
method parse_result (line 46) | def parse_result(
method parse_result (line 51) | def parse_result(
method parse_result (line 55) | def parse_result(
method parse (line 82) | def parse(self, text: str) -> TBaseModel:
method get_format_instructions (line 93) | def get_format_instructions(self) -> str:
method _type (line 114) | def _type(self) -> str:
method OutputType (line 119) | def OutputType(self) -> type[TBaseModel]:
FILE: libs/core/langchain_core/output_parsers/string.py
class StrOutputParser (line 8) | class StrOutputParser(BaseTransformOutputParser[str]):
method is_lc_serializable (line 38) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 47) | def get_lc_namespace(cls) -> list[str]:
method _type (line 56) | def _type(self) -> str:
method parse (line 61) | def parse(self, text: str) -> str:
FILE: libs/core/langchain_core/output_parsers/transform.py
class BaseTransformOutputParser (line 28) | class BaseTransformOutputParser(BaseOutputParser[T]):
method _transform (line 31) | def _transform(
method _atransform (line 41) | async def _atransform(
method transform (line 56) | def transform(
method atransform (line 77) | async def atransform(
class BaseCumulativeTransformOutputParser (line 99) | class BaseCumulativeTransformOutputParser(BaseTransformOutputParser[T]):
method _diff (line 107) | def _diff(
method _transform (line 126) | def _transform(self, input: Iterator[str | BaseMessage]) -> Iterator[A...
method _atransform (line 151) | async def _atransform(
FILE: libs/core/langchain_core/output_parsers/xml.py
class _StreamingParser (line 42) | class _StreamingParser:
method __init__ (line 49) | def __init__(self, parser: Literal["defusedxml", "xml"]) -> None:
method parse (line 80) | def parse(self, chunk: str | BaseMessage) -> Iterator[AddableDict]:
method close (line 141) | def close(self) -> None:
class XMLOutputParser (line 151) | class XMLOutputParser(BaseTransformOutputParser):
method get_format_instructions (line 202) | def get_format_instructions(self) -> str:
method parse (line 206) | def parse(self, text: str) -> dict[str, str | list[Any]]:
method _transform (line 253) | def _transform(self, input: Iterator[str | BaseMessage]) -> Iterator[A...
method _atransform (line 260) | async def _atransform(
method _root_to_dict (line 269) | def _root_to_dict(self, root: ET.Element) -> dict[str, str | list[Any]]:
method _type (line 284) | def _type(self) -> str:
function nested_element (line 288) | def nested_element(path: list[str], elem: ET.Element) -> Any:
FILE: libs/core/langchain_core/outputs/__init__.py
function __getattr__ (line 58) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 65) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/outputs/chat_generation.py
class ChatGeneration (line 17) | class ChatGeneration(Generation):
method set_text (line 45) | def set_text(self) -> Self:
class ChatGenerationChunk (line 87) | class ChatGenerationChunk(ChatGeneration):
method __add__ (line 100) | def __add__(
function merge_chat_generation_chunks (line 140) | def merge_chat_generation_chunks(
FILE: libs/core/langchain_core/outputs/chat_result.py
class ChatResult (line 8) | class ChatResult(BaseModel):
FILE: libs/core/langchain_core/outputs/generation.py
class Generation (line 11) | class Generation(Serializable):
method is_lc_serializable (line 41) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 46) | def get_lc_namespace(cls) -> list[str]:
class GenerationChunk (line 55) | class GenerationChunk(Generation):
method __add__ (line 58) | def __add__(self, other: GenerationChunk) -> GenerationChunk:
FILE: libs/core/langchain_core/outputs/llm_result.py
class LLMResult (line 15) | class LLMResult(BaseModel):
method flatten (line 60) | def flatten(self) -> list[LLMResult]:
method __eq__ (line 96) | def __eq__(self, other: object) -> bool:
FILE: libs/core/langchain_core/outputs/run_info.py
class RunInfo (line 10) | class RunInfo(BaseModel):
FILE: libs/core/langchain_core/prompt_values.py
class PromptValue (line 24) | class PromptValue(Serializable, ABC):
method is_lc_serializable (line 32) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 37) | def get_lc_namespace(cls) -> list[str]:
method to_string (line 46) | def to_string(self) -> str:
method to_messages (line 50) | def to_messages(self) -> list[BaseMessage]:
class StringPromptValue (line 54) | class StringPromptValue(PromptValue):
method get_lc_namespace (line 63) | def get_lc_namespace(cls) -> list[str]:
method to_string (line 71) | def to_string(self) -> str:
method to_messages (line 75) | def to_messages(self) -> list[BaseMessage]:
class ChatPromptValue (line 80) | class ChatPromptValue(PromptValue):
method to_string (line 89) | def to_string(self) -> str:
method to_messages (line 93) | def to_messages(self) -> list[BaseMessage]:
method get_lc_namespace (line 98) | def get_lc_namespace(cls) -> list[str]:
class ImageURL (line 107) | class ImageURL(TypedDict, total=False):
class ImagePromptValue (line 135) | class ImagePromptValue(PromptValue):
method to_string (line 143) | def to_string(self) -> str:
method to_messages (line 147) | def to_messages(self) -> list[BaseMessage]:
class ChatPromptValueConcrete (line 152) | class ChatPromptValueConcrete(ChatPromptValue):
FILE: libs/core/langchain_core/prompts/__init__.py
function __getattr__ (line 93) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 100) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/prompts/base.py
class BasePromptTemplate (line 40) | class BasePromptTemplate(
method validate_variable_names (line 80) | def validate_variable_names(self) -> Self:
method get_lc_namespace (line 108) | def get_lc_namespace(cls) -> list[str]:
method is_lc_serializable (line 117) | def is_lc_serializable(cls) -> bool:
method _serialized (line 126) | def _serialized(self) -> dict[str, Any]:
method OutputType (line 134) | def OutputType(self) -> Any:
method get_input_schema (line 139) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method _validate_input (line 160) | def _validate_input(self, inner_input: Any) -> dict:
method _format_prompt_with_error_handling (line 196) | def _format_prompt_with_error_handling(self, inner_input: dict) -> Pro...
method _aformat_prompt_with_error_handling (line 200) | async def _aformat_prompt_with_error_handling(
method invoke (line 207) | def invoke(
method ainvoke (line 233) | async def ainvoke(
method format_prompt (line 259) | def format_prompt(self, **kwargs: Any) -> PromptValue:
method aformat_prompt (line 269) | async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
method partial (line 280) | def partial(self, **kwargs: str | Callable[[], str]) -> BasePromptTemp...
method _merge_partial_and_user_variables (line 296) | def _merge_partial_and_user_variables(self, **kwargs: Any) -> dict[str...
method format (line 304) | def format(self, **kwargs: Any) -> FormatOutputType:
method aformat (line 319) | async def aformat(self, **kwargs: Any) -> FormatOutputType:
method _prompt_type (line 336) | def _prompt_type(self) -> str:
method dict (line 340) | def dict(self, **kwargs: Any) -> dict:
method save (line 360) | def save(self, file_path: Path | str) -> None:
function _get_document_info (line 403) | def _get_document_info(doc: Document, prompt: BasePromptTemplate[str]) -...
function format_document (line 421) | def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
function aformat_document (line 456) | async def aformat_document(doc: Document, prompt: BasePromptTemplate[str...
FILE: libs/core/langchain_core/prompts/chat.py
class MessagesPlaceholder (line 53) | class MessagesPlaceholder(BaseMessagePromptTemplate):
method __init__ (line 145) | def __init__(
method format_messages (line 164) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method input_variables (line 193) | def input_variables(self) -> list[str]:
method pretty_repr (line 202) | def pretty_repr(self, html: bool = False) -> str:
class BaseStringMessagePromptTemplate (line 226) | class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
method from_template (line 236) | def from_template(
method from_template_file (line 268) | def from_template_file(
method format (line 286) | def format(self, **kwargs: Any) -> BaseMessage:
method aformat (line 296) | async def aformat(self, **kwargs: Any) -> BaseMessage:
method format_messages (line 307) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 318) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method input_variables (line 330) | def input_variables(self) -> list[str]:
method pretty_repr (line 339) | def pretty_repr(self, html: bool = False) -> str:
class ChatMessagePromptTemplate (line 354) | class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):
method format (line 360) | def format(self, **kwargs: Any) -> BaseMessage:
method aformat (line 374) | async def aformat(self, **kwargs: Any) -> BaseMessage:
class _TextTemplateParam (line 389) | class _TextTemplateParam(TypedDict, total=False):
class _ImageTemplateParam (line 393) | class _ImageTemplateParam(TypedDict, total=False):
class _StringImageMessagePromptTemplate (line 397) | class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
method from_template (line 411) | def from_template(
method from_template_file (line 533) | def from_template_file(
method format_messages (line 552) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 563) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method input_variables (line 575) | def input_variables(self) -> list[str]:
method format (line 584) | def format(self, **kwargs: Any) -> BaseMessage:
method aformat (line 615) | async def aformat(self, **kwargs: Any) -> BaseMessage:
method pretty_repr (line 647) | def pretty_repr(self, html: bool = False) -> str:
class HumanMessagePromptTemplate (line 664) | class HumanMessagePromptTemplate(_StringImageMessagePromptTemplate):
class AIMessagePromptTemplate (line 673) | class AIMessagePromptTemplate(_StringImageMessagePromptTemplate):
class SystemMessagePromptTemplate (line 682) | class SystemMessagePromptTemplate(_StringImageMessagePromptTemplate):
class BaseChatPromptTemplate (line 691) | class BaseChatPromptTemplate(BasePromptTemplate, ABC):
method lc_attributes (line 696) | def lc_attributes(self) -> dict:
method format (line 699) | def format(self, **kwargs: Any) -> str:
method aformat (line 711) | async def aformat(self, **kwargs: Any) -> str:
method format_prompt (line 723) | def format_prompt(self, **kwargs: Any) -> ChatPromptValue:
method aformat_prompt (line 734) | async def aformat_prompt(self, **kwargs: Any) -> ChatPromptValue:
method format_messages (line 746) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 753) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method pretty_repr (line 761) | def pretty_repr(
method pretty_print (line 775) | def pretty_print(self) -> None:
class ChatPromptTemplate (line 790) | class ChatPromptTemplate(BaseChatPromptTemplate):
method __init__ (line 903) | def __init__(
method get_lc_namespace (line 999) | def get_lc_namespace(cls) -> list[str]:
method __add__ (line 1007) | def __add__(self, other: Any) -> ChatPromptTemplate:
method validate_input_variables (line 1049) | def validate_input_variables(cls, values: dict) -> Any:
method from_template (line 1102) | def from_template(cls, template: str, **kwargs: Any) -> ChatPromptTemp...
method from_messages (line 1120) | def from_messages(
method format_messages (line 1170) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 1198) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method partial (line 1226) | def partial(self, **kwargs: Any) -> ChatPromptTemplate:
method append (line 1261) | def append(self, message: MessageLikeRepresentation) -> None:
method extend (line 1269) | def extend(self, messages: Sequence[MessageLikeRepresentation]) -> None:
method __getitem__ (line 1280) | def __getitem__(self, index: int) -> MessageLike: ...
method __getitem__ (line 1283) | def __getitem__(self, index: slice) -> ChatPromptTemplate: ...
method __getitem__ (line 1285) | def __getitem__(self, index: int | slice) -> MessageLike | ChatPromptT...
method __len__ (line 1300) | def __len__(self) -> int:
method _prompt_type (line 1305) | def _prompt_type(self) -> str:
method save (line 1315) | def save(self, file_path: Path | str) -> None:
method pretty_repr (line 1324) | def pretty_repr(self, html: bool = False) -> str:
function _create_template_from_message_type (line 1337) | def _create_template_from_message_type(
function _convert_to_message_template (line 1414) | def _convert_to_message_template(
FILE: libs/core/langchain_core/prompts/dict.py
class DictPromptTemplate (line 18) | class DictPromptTemplate(RunnableSerializable[dict, dict]):
method input_variables (line 30) | def input_variables(self) -> list[str]:
method format (line 34) | def format(self, **kwargs: Any) -> dict[str, Any]:
method aformat (line 42) | async def aformat(self, **kwargs: Any) -> dict[str, Any]:
method invoke (line 51) | def invoke(
method _prompt_type (line 64) | def _prompt_type(self) -> str:
method _serialized (line 68) | def _serialized(self) -> dict[str, Any]:
method is_lc_serializable (line 75) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 80) | def get_lc_namespace(cls) -> list[str]:
method pretty_repr (line 88) | def pretty_repr(self, *, html: bool = False) -> str:
function _get_input_variables (line 100) | def _get_input_variables(
function _insert_input_variables (line 118) | def _insert_input_variables(
FILE: libs/core/langchain_core/prompts/few_shot.py
class _FewShotPromptTemplateMixin (line 34) | class _FewShotPromptTemplateMixin(BaseModel):
method check_examples_and_selector (line 56) | def check_examples_and_selector(cls, values: dict) -> Any:
method _get_examples (line 82) | def _get_examples(self, **kwargs: Any) -> list[dict]:
method _aget_examples (line 101) | async def _aget_examples(self, **kwargs: Any) -> list[dict]:
class FewShotPromptTemplate (line 121) | class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTem...
method is_lc_serializable (line 125) | def is_lc_serializable(cls) -> bool:
method __init__ (line 150) | def __init__(self, **kwargs: Any) -> None:
method template_is_valid (line 157) | def template_is_valid(self) -> Self:
method format (line 180) | def format(self, **kwargs: Any) -> str:
method aformat (line 208) | async def aformat(self, **kwargs: Any) -> str:
method _prompt_type (line 237) | def _prompt_type(self) -> str:
method save (line 247) | def save(self, file_path: Path | str) -> None:
class FewShotChatMessagePromptTemplate (line 262) | class FewShotChatMessagePromptTemplate(
method is_lc_serializable (line 388) | def is_lc_serializable(cls) -> bool:
method format_messages (line 397) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 418) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method format (line 439) | def format(self, **kwargs: Any) -> str:
method aformat (line 456) | async def aformat(self, **kwargs: Any) -> str:
method pretty_repr (line 474) | def pretty_repr(self, html: bool = False) -> str:
FILE: libs/core/langchain_core/prompts/few_shot_with_templates.py
class FewShotPromptWithTemplates (line 19) | class FewShotPromptWithTemplates(StringPromptTemplate):
method get_lc_namespace (line 56) | def get_lc_namespace(cls) -> list[str]:
method check_examples_and_selector (line 66) | def check_examples_and_selector(cls, values: dict) -> Any:
method template_is_valid (line 81) | def template_is_valid(self) -> Self:
method _get_examples (line 109) | def _get_examples(self, **kwargs: Any) -> list[dict]:
method _aget_examples (line 116) | async def _aget_examples(self, **kwargs: Any) -> list[dict]:
method format (line 123) | def format(self, **kwargs: Any) -> str:
method aformat (line 170) | async def aformat(self, **kwargs: Any) -> str:
method _prompt_type (line 215) | def _prompt_type(self) -> str:
method save (line 225) | def save(self, file_path: Path | str) -> None:
FILE: libs/core/langchain_core/prompts/image.py
class ImagePromptTemplate (line 16) | class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
method __init__ (line 28) | def __init__(self, **kwargs: Any) -> None:
method _prompt_type (line 49) | def _prompt_type(self) -> str:
method get_lc_namespace (line 54) | def get_lc_namespace(cls) -> list[str]:
method format_prompt (line 62) | def format_prompt(self, **kwargs: Any) -> PromptValue:
method aformat_prompt (line 73) | async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
method format (line 84) | def format(
method aformat (line 134) | async def aformat(self, **kwargs: Any) -> ImageURL:
method pretty_repr (line 145) | def pretty_repr(
FILE: libs/core/langchain_core/prompts/loading.py
function _validate_path (line 21) | def _validate_path(path: Path) -> None:
function load_prompt_from_config (line 54) | def load_prompt_from_config(
function _load_template (line 85) | def _load_template(
function _load_examples (line 109) | def _load_examples(config: dict, *, allow_dangerous_paths: bool = False)...
function _load_output_parser (line 132) | def _load_output_parser(config: dict) -> dict:
function _load_few_shot_prompt (line 142) | def _load_few_shot_prompt(
function _load_prompt (line 177) | def _load_prompt(
function load_prompt (line 207) | def load_prompt(
function _load_prompt_from_file (line 242) | def _load_prompt_from_file(
function _load_chat_prompt (line 265) | def _load_chat_prompt(
FILE: libs/core/langchain_core/prompts/message.py
class BaseMessagePromptTemplate (line 16) | class BaseMessagePromptTemplate(Serializable, ABC):
method is_lc_serializable (line 20) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 25) | def get_lc_namespace(cls) -> list[str]:
method format_messages (line 34) | def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
method aformat_messages (line 46) | async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
method input_variables (line 59) | def input_variables(self) -> list[str]:
method pretty_repr (line 66) | def pretty_repr(
method pretty_print (line 80) | def pretty_print(self) -> None:
method __add__ (line 84) | def __add__(self, other: Any) -> ChatPromptTemplate:
FILE: libs/core/langchain_core/prompts/prompt.py
class PromptTemplate (line 24) | class PromptTemplate(StringPromptTemplate):
method lc_attributes (line 62) | def lc_attributes(self) -> dict[str, Any]:
method get_lc_namespace (line 69) | def get_lc_namespace(cls) -> list[str]:
method pre_init_validation (line 91) | def pre_init_validation(cls, values: dict) -> Any:
method get_input_schema (line 128) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method __add__ (line 142) | def __add__(self, other: Any) -> PromptTemplate:
method _prompt_type (line 187) | def _prompt_type(self) -> str:
method format (line 191) | def format(self, **kwargs: Any) -> str:
method from_examples (line 204) | def from_examples(
method from_file (line 236) | def from_file(
method from_template (line 257) | def from_template(
FILE: libs/core/langchain_core/prompts/string.py
function jinja2_formatter (line 33) | def jinja2_formatter(template: str, /, **kwargs: Any) -> str:
function validate_jinja2 (line 75) | def validate_jinja2(template: str, input_variables: list[str]) -> None:
function _get_jinja2_variables_from_template (line 100) | def _get_jinja2_variables_from_template(template: str) -> set[str]:
function mustache_formatter (line 112) | def mustache_formatter(template: str, /, **kwargs: Any) -> str:
function mustache_template_vars (line 125) | def mustache_template_vars(
function mustache_schema (line 158) | def mustache_schema(template: str) -> type[BaseModel]:
function _create_model_recursive (line 197) | def _create_model_recursive(name: str, defs: Defs) -> type[BaseModel]:
function check_valid_template (line 222) | def check_valid_template(
function get_template_variables (line 256) | def get_template_variables(template: str, template_format: str) -> list[...
class StringPromptTemplate (line 311) | class StringPromptTemplate(BasePromptTemplate, ABC):
method get_lc_namespace (line 315) | def get_lc_namespace(cls) -> list[str]:
method format_prompt (line 323) | def format_prompt(self, **kwargs: Any) -> PromptValue:
method aformat_prompt (line 334) | async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
method format (line 347) | def format(self, **kwargs: Any) -> str: ...
method pretty_repr (line 349) | def pretty_repr(
method pretty_print (line 371) | def pretty_print(self) -> None:
function is_subsequence (line 376) | def is_subsequence(child: Sequence, parent: Sequence) -> bool:
FILE: libs/core/langchain_core/prompts/structured.py
class StructuredPrompt (line 28) | class StructuredPrompt(ChatPromptTemplate):
method __init__ (line 36) | def __init__(
method get_lc_namespace (line 75) | def get_lc_namespace(cls) -> list[str]:
method from_messages_and_schema (line 87) | def from_messages_and_schema(
method __or__ (line 139) | def __or__(
method pipe (line 149) | def pipe(
FILE: libs/core/langchain_core/rate_limiters.py
class BaseRateLimiter (line 11) | class BaseRateLimiter(abc.ABC):
method acquire (line 29) | def acquire(self, *, blocking: bool = True) -> bool:
method aacquire (line 48) | async def aacquire(self, *, blocking: bool = True) -> bool:
class InMemoryRateLimiter (line 67) | class InMemoryRateLimiter(BaseRateLimiter):
method __init__ (line 120) | def __init__(
method _consume (line 165) | def _consume(self) -> bool:
method acquire (line 197) | def acquire(self, *, blocking: bool = True) -> bool:
method aacquire (line 222) | async def aacquire(self, *, blocking: bool = True) -> bool:
FILE: libs/core/langchain_core/retrievers.py
class LangSmithRetrieverParams (line 39) | class LangSmithRetrieverParams(TypedDict, total=False):
class BaseRetriever (line 55) | class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput...
method __init_subclass__ (line 146) | def __init_subclass__(cls, **kwargs: Any) -> None:
method _get_ls_params (line 167) | def _get_ls_params(self, **_kwargs: Any) -> LangSmithRetrieverParams:
method invoke (line 179) | def invoke(
method ainvoke (line 237) | async def ainvoke(
method _get_relevant_documents (line 298) | def _get_relevant_documents(
method _aget_relevant_documents (line 311) | async def _aget_relevant_documents(
FILE: libs/core/langchain_core/runnables/__init__.py
function __getattr__ (line 128) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 135) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/runnables/base.py
class Runnable (line 124) | class Runnable(ABC, Generic[Input, Output]):
method get_name (line 261) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method InputType (line 300) | def InputType(self) -> type[Input]: # noqa: N802
method OutputType (line 335) | def OutputType(self) -> type[Output]: # noqa: N802
method input_schema (line 366) | def input_schema(self) -> type[BaseModel]:
method get_input_schema (line 370) | def get_input_schema(
method get_input_jsonschema (line 411) | def get_input_jsonschema(
method output_schema (line 442) | def output_schema(self) -> type[BaseModel]:
method get_output_schema (line 449) | def get_output_schema(
method get_output_jsonschema (line 490) | def get_output_jsonschema(
method config_specs (line 521) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method config_schema (line 525) | def config_schema(self, *, include: Sequence[str] | None = None) -> ty...
method get_config_jsonschema (line 568) | def get_config_jsonschema(
method get_graph (line 584) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method get_prompts (line 605) | def get_prompts(
method __or__ (line 618) | def __or__(
method __ror__ (line 639) | def __ror__(
method pipe (line 660) | def pipe(
method pick (line 709) | def pick(self, keys: str | list[str]) -> RunnableSerializable[Any, Any]:
method assign (line 772) | def assign(
method invoke (line 822) | def invoke(
method ainvoke (line 844) | async def ainvoke(
method batch (line 867) | def batch(
method batch_as_completed (line 918) | def batch_as_completed(
method batch_as_completed (line 928) | def batch_as_completed(
method batch_as_completed (line 937) | def batch_as_completed(
method abatch (line 1002) | async def abatch(
method abatch_as_completed (line 1051) | def abatch_as_completed(
method abatch_as_completed (line 1061) | def abatch_as_completed(
method abatch_as_completed (line 1070) | async def abatch_as_completed(
method stream (line 1130) | def stream(
method astream (line 1151) | async def astream(
method astream_log (line 1173) | def astream_log(
method astream_log (line 1190) | def astream_log(
method astream_log (line 1206) | async def astream_log(
method astream_events (line 1273) | async def astream_events(
method transform (line 1519) | def transform(
method atransform (line 1564) | async def atransform(
method bind (line 1610) | def bind(self, **kwargs: Any) -> Runnable[Input, Output]:
method with_config (line 1644) | def with_config(
method with_listeners (line 1669) | def with_listeners(
method with_alisteners (line 1741) | def with_alisteners(
method with_types (line 1838) | def with_types(
method with_retry (line 1860) | def with_retry(
method map (line 1924) | def map(self) -> Runnable[list[Input], list[Output]]:
method with_fallbacks (line 1947) | def with_fallbacks(
method _call_with_config (line 2027) | def _call_with_config(
method _acall_with_config (line 2076) | async def _acall_with_config(
method _batch_with_config (line 2119) | def _batch_with_config(
method _abatch_with_config (line 2186) | async def _abatch_with_config(
method _transform_stream_with_config (line 2261) | def _transform_stream_with_config(
method _atransform_stream_with_config (line 2359) | async def _atransform_stream_with_config(
method as_tool (line 2467) | def as_tool(
class RunnableSerializable (line 2586) | class RunnableSerializable(Serializable, Runnable[Input, Output]):
method to_json (line 2602) | def to_json(self) -> SerializedConstructor | SerializedNotImplemented:
method configurable_fields (line 2614) | def configurable_fields(
method configurable_alternatives (line 2672) | def configurable_alternatives(
function _seq_input_schema (line 2733) | def _seq_input_schema(
function _seq_output_schema (line 2763) | def _seq_output_schema(
class RunnableSequence (line 2817) | class RunnableSequence(RunnableSerializable[Input, Output]):
method __init__ (line 2911) | def __init__(
method get_lc_namespace (line 2954) | def get_lc_namespace(cls) -> list[str]:
method steps (line 2963) | def steps(self) -> list[Runnable[Any, Any]]:
method is_lc_serializable (line 2973) | def is_lc_serializable(cls) -> bool:
method InputType (line 2983) | def InputType(self) -> type[Input]:
method OutputType (line 2989) | def OutputType(self) -> type[Output]:
method get_input_schema (line 2994) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 3007) | def get_output_schema(
method config_specs (line 3023) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 3036) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method __repr__ (line 3070) | def __repr__(self) -> str:
method __or__ (line 3077) | def __or__(
method __ror__ (line 3104) | def __ror__(
method invoke (line 3131) | def invoke(
method ainvoke (line 3167) | async def ainvoke(
method batch (line 3207) | def batch(
method abatch (line 3335) | async def abatch(
method _transform (line 3465) | def _transform(
method _atransform (line 3488) | async def _atransform(
method transform (line 3514) | def transform(
method stream (line 3528) | def stream(
method atransform (line 3537) | async def atransform(
method astream (line 3552) | async def astream(
class RunnableParallel (line 3565) | class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]):
method __init__ (line 3651) | def __init__(
method is_lc_serializable (line 3679) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 3685) | def get_lc_namespace(cls) -> list[str]:
method get_name (line 3698) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method InputType (line 3714) | def InputType(self) -> Any:
method get_input_schema (line 3723) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 3758) | def get_output_schema(
method config_specs (line 3775) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 3787) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method __repr__ (line 3826) | def __repr__(self) -> str:
method invoke (line 3834) | def invoke(
method ainvoke (line 3894) | async def ainvoke(
method _transform (line 3948) | def _transform(
method transform (line 3996) | def transform(
method stream (line 4007) | def stream(
method _atransform (line 4015) | async def _atransform(
method atransform (line 4067) | async def atransform(
method astream (line 4079) | async def astream(
class RunnableGenerator (line 4096) | class RunnableGenerator(Runnable[Input, Output]):
method __init__ (line 4188) | def __init__(
method InputType (line 4232) | def InputType(self) -> Any:
method get_input_schema (line 4244) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method OutputType (line 4271) | def OutputType(self) -> Any:
method get_output_schema (line 4284) | def get_output_schema(
method __eq__ (line 4311) | def __eq__(self, other: object) -> bool:
method __repr__ (line 4323) | def __repr__(self) -> str:
method transform (line 4327) | def transform(
method stream (line 4345) | def stream(
method invoke (line 4354) | def invoke(
method atransform (line 4363) | def atransform(
method astream (line 4378) | def astream(
method ainvoke (line 4390) | async def ainvoke(
class RunnableLambda (line 4399) | class RunnableLambda(Runnable[Input, Output]):
method __init__ (line 4448) | def __init__(
method __init__ (line 4456) | def __init__(
method __init__ (line 4464) | def __init__(
method __init__ (line 4472) | def __init__(
method __init__ (line 4480) | def __init__(
method __init__ (line 4490) | def __init__(
method __init__ (line 4505) | def __init__(
method __init__ (line 4520) | def __init__(
method __init__ (line 4535) | def __init__(
method __init__ (line 4550) | def __init__(
method __init__ (line 4565) | def __init__(
method __init__ (line 4579) | def __init__(
method InputType (line 4656) | def InputType(self) -> Any:
method get_input_schema (line 4669) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method OutputType (line 4714) | def OutputType(self) -> Any:
method get_output_schema (line 4737) | def get_output_schema(
method deps (line 4764) | def deps(self) -> list[Runnable]:
method config_specs (line 4789) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 4795) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method __eq__ (line 4825) | def __eq__(self, other: object) -> bool:
method __repr__ (line 4836) | def __repr__(self) -> str:
method _invoke (line 4851) | def _invoke(
method _ainvoke (line 4896) | async def _ainvoke(
method invoke (line 4997) | def invoke(
method ainvoke (line 5028) | async def ainvoke(
method _transform (line 5052) | def _transform(
method transform (line 5115) | def transform(
method stream (line 5136) | def stream(
method _atransform (line 5144) | async def _atransform(
method atransform (line 5244) | async def atransform(
method astream (line 5259) | async def astream(
class RunnableEachBase (line 5272) | class RunnableEachBase(RunnableSerializable[list[Input], list[Output]]):
method InputType (line 5292) | def InputType(self) -> Any:
method get_input_schema (line 5296) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method OutputType (line 5315) | def OutputType(self) -> type[list[Output]]:
method get_output_schema (line 5319) | def get_output_schema(
method config_specs (line 5338) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 5342) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method is_lc_serializable (line 5347) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 5353) | def get_lc_namespace(cls) -> list[str]:
method _invoke (line 5361) | def _invoke(
method invoke (line 5374) | def invoke(
method _ainvoke (line 5379) | async def _ainvoke(
method ainvoke (line 5392) | async def ainvoke(
method astream_events (line 5398) | async def astream_events(
class RunnableEach (line 5413) | class RunnableEach(RunnableEachBase[Input, Output]):
method get_name (line 5444) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method bind (line 5449) | def bind(self, **kwargs: Any) -> RunnableEach[Input, Output]:
method with_config (line 5453) | def with_config(
method with_listeners (line 5459) | def with_listeners(
method with_alisteners (line 5496) | def with_alisteners(
class RunnableBindingBase (line 5530) | class RunnableBindingBase(RunnableSerializable[Input, Output]): # type:...
method __init__ (line 5577) | def __init__(
method get_name (line 5624) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method InputType (line 5629) | def InputType(self) -> type[Input]:
method OutputType (line 5638) | def OutputType(self) -> type[Output]:
method get_input_schema (line 5646) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 5652) | def get_output_schema(
method config_specs (line 5661) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 5665) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method is_lc_serializable (line 5670) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 5676) | def get_lc_namespace(cls) -> list[str]:
method _merge_configs (line 5684) | def _merge_configs(self, *configs: RunnableConfig | None) -> RunnableC...
method invoke (line 5689) | def invoke(
method ainvoke (line 5702) | async def ainvoke(
method batch (line 5715) | def batch(
method abatch (line 5738) | async def abatch(
method batch_as_completed (line 5761) | def batch_as_completed(
method batch_as_completed (line 5771) | def batch_as_completed(
method batch_as_completed (line 5781) | def batch_as_completed(
method abatch_as_completed (line 5813) | def abatch_as_completed(
method abatch_as_completed (line 5823) | def abatch_as_completed(
method abatch_as_completed (line 5833) | async def abatch_as_completed(
method stream (line 5866) | def stream(
method astream (line 5879) | async def astream(
method astream_events (line 5893) | async def astream_events(
method transform (line 5905) | def transform(
method atransform (line 5918) | async def atransform(
class RunnableBinding (line 5932) | class RunnableBinding(RunnableBindingBase[Input, Output]): # type: igno...
method bind (line 5985) | def bind(self, **kwargs: Any) -> Runnable[Input, Output]:
method with_config (line 6006) | def with_config(
method with_listeners (line 6022) | def with_listeners(
method with_types (line 6075) | def with_types(
method with_retry (line 6094) | def with_retry(self, **kwargs: Any) -> Runnable[Input, Output]:
method __getattr__ (line 6103) | def __getattr__(self, name: str) -> Any: # type: ignore[misc]
class _RunnableCallableSync (line 6140) | class _RunnableCallableSync(Protocol[Input, Output]):
method __call__ (line 6141) | def __call__(self, _in: Input, /, *, config: RunnableConfig) -> Output...
class _RunnableCallableAsync (line 6144) | class _RunnableCallableAsync(Protocol[Input, Output]):
method __call__ (line 6145) | def __call__(
class _RunnableCallableIterator (line 6150) | class _RunnableCallableIterator(Protocol[Input, Output]):
method __call__ (line 6151) | def __call__(
class _RunnableCallableAsyncIterator (line 6156) | class _RunnableCallableAsyncIterator(Protocol[Input, Output]):
method __call__ (line 6157) | def __call__(
function coerce_to_runnable (line 6176) | def coerce_to_runnable(thing: RunnableLike) -> Runnable[Input, Output]:
function chain (line 6204) | def chain(
function chain (line 6210) | def chain(
function chain (line 6216) | def chain(
function chain (line 6222) | def chain(
function chain (line 6227) | def chain(
FILE: libs/core/langchain_core/runnables/branch.py
class RunnableBranch (line 42) | class RunnableBranch(RunnableSerializable[Input, Output]):
method __init__ (line 74) | def __init__(
method is_lc_serializable (line 144) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 150) | def get_lc_namespace(cls) -> list[str]:
method get_input_schema (line 159) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method config_specs (line 177) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method invoke (line 189) | def invoke(
method ainvoke (line 248) | async def ainvoke(
method stream (line 296) | def stream(
method astream (line 380) | async def astream(
FILE: libs/core/langchain_core/runnables/config.py
class EmptyDict (line 45) | class EmptyDict(TypedDict, total=False):
class RunnableConfig (line 49) | class RunnableConfig(TypedDict, total=False):
function _set_config_context (line 150) | def _set_config_context(
function set_config_context (line 194) | def set_config_context(config: RunnableConfig) -> Generator[Context, Non...
function ensure_config (line 225) | def ensure_config(config: RunnableConfig | None = None) -> RunnableConfig:
function get_config_list (line 278) | def get_config_list(
function patch_config (line 324) | def patch_config(
function merge_configs (line 366) | def merge_configs(*configs: RunnableConfig | None) -> RunnableConfig:
function call_func_with_variable_args (line 432) | def call_func_with_variable_args(
function acall_func_with_variable_args (line 464) | def acall_func_with_variable_args(
function get_callback_manager_for_config (line 498) | def get_callback_manager_for_config(config: RunnableConfig) -> CallbackM...
function get_async_callback_manager_for_config (line 514) | def get_async_callback_manager_for_config(
class ContextThreadPoolExecutor (line 536) | class ContextThreadPoolExecutor(ThreadPoolExecutor):
method submit (line 539) | def submit( # type: ignore[override]
method map (line 559) | def map(
function get_executor_for_config (line 589) | def get_executor_for_config(
function run_in_executor (line 607) | async def run_in_executor(
FILE: libs/core/langchain_core/runnables/configurable.py
class DynamicRunnable (line 49) | class DynamicRunnable(RunnableSerializable[Input, Output]):
method is_lc_serializable (line 68) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 74) | def get_lc_namespace(cls) -> list[str]:
method InputType (line 84) | def InputType(self) -> type[Input]:
method OutputType (line 89) | def OutputType(self) -> type[Output]:
method get_input_schema (line 93) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 98) | def get_output_schema(
method get_graph (line 105) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method with_config (line 110) | def with_config(
method prepare (line 120) | def prepare(
method _prepare (line 137) | def _prepare(
method invoke (line 142) | def invoke(
method ainvoke (line 149) | async def ainvoke(
method batch (line 156) | def batch(
method abatch (line 199) | async def abatch(
method stream (line 238) | def stream(
method astream (line 248) | async def astream(
method transform (line 259) | def transform(
method atransform (line 269) | async def atransform(
method __getattr__ (line 280) | def __getattr__(self, name: str) -> Any: # type: ignore[misc]
class RunnableConfigurableFields (line 318) | class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
method config_specs (line 382) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method configurable_fields (line 415) | def configurable_fields(
method _prepare (line 420) | def _prepare(
class StrEnum (line 463) | class StrEnum(str, enum.Enum):
class RunnableConfigurableAlternatives (line 475) | class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
method config_specs (line 553) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method configurable_fields (line 601) | def configurable_fields(
method _prepare (line 612) | def _prepare(
function _strremoveprefix (line 641) | def _strremoveprefix(s: str, prefix: str) -> str:
function prefix_config_spec (line 646) | def prefix_config_spec(
function make_options_spec (line 675) | def make_options_spec(
FILE: libs/core/langchain_core/runnables/fallbacks.py
class RunnableWithFallbacks (line 36) | class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
method InputType (line 112) | def InputType(self) -> type[Input]:
method OutputType (line 117) | def OutputType(self) -> type[Output]:
method get_input_schema (line 121) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 125) | def get_output_schema(
method config_specs (line 132) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method is_lc_serializable (line 141) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 147) | def get_lc_namespace(cls) -> list[str]:
method runnables (line 156) | def runnables(self) -> Iterator[Runnable[Input, Output]]:
method invoke (line 166) | def invoke(
method ainvoke (line 216) | async def ainvoke(
method batch (line 266) | def batch(
method abatch (line 362) | async def abatch(
method stream (line 466) | def stream(
method astream (line 530) | async def astream(
method __getattr__ (line 593) | def __getattr__(self, name: str) -> Any:
function _returns_runnable (line 649) | def _returns_runnable(attr: Any) -> bool:
function _is_runnable_type (line 656) | def _is_runnable_type(type_: Any) -> bool:
FILE: libs/core/langchain_core/runnables/graph.py
class Stringifiable (line 31) | class Stringifiable(Protocol):
method __str__ (line 34) | def __str__(self) -> str:
class LabelsDict (line 38) | class LabelsDict(TypedDict):
function is_uuid (line 47) | def is_uuid(value: str) -> bool:
class Edge (line 63) | class Edge(NamedTuple):
method copy (line 75) | def copy(self, *, source: str | None = None, target: str | None = None...
class Node (line 93) | class Node(NamedTuple):
method copy (line 105) | def copy(
class Branch (line 128) | class Branch(NamedTuple):
class CurveStyle (line 137) | class CurveStyle(Enum):
class NodeStyles (line 155) | class NodeStyles:
class MermaidDrawMethod (line 169) | class MermaidDrawMethod(Enum):
function node_data_str (line 178) | def node_data_str(
function node_data_json (line 197) | def node_data_json(
class Graph (line 253) | class Graph:
method to_json (line 264) | def to_json(self, *, with_schemas: bool = False) -> dict[str, list[dic...
method __bool__ (line 301) | def __bool__(self) -> bool:
method next_id (line 305) | def next_id(self) -> str:
method add_node (line 312) | def add_node(
method remove_node (line 340) | def remove_node(self, node: Node) -> None:
method add_edge (line 351) | def add_edge(
method extend (line 384) | def extend(
method reid (line 422) | def reid(self) -> Graph:
method first_node (line 457) | def first_node(self) -> Node | None:
method last_node (line 469) | def last_node(self) -> Node | None:
method trim_first_node (line 481) | def trim_first_node(self) -> None:
method trim_last_node (line 494) | def trim_last_node(self) -> None:
method draw_ascii (line 507) | def draw_ascii(self) -> str:
method print_ascii (line 521) | def print_ascii(self) -> None:
method draw_png (line 526) | def draw_png(
method draw_png (line 534) | def draw_png(
method draw_png (line 541) | def draw_png(
method draw_mermaid (line 575) | def draw_mermaid(
method draw_mermaid_png (line 630) | def draw_mermaid_png(
function _first_node (line 706) | def _first_node(graph: Graph, exclude: Sequence[str] = ()) -> Node | None:
function _last_node (line 724) | def _last_node(graph: Graph, exclude: Sequence[str] = ()) -> Node | None:
FILE: libs/core/langchain_core/runnables/graph_ascii.py
class VertexViewer (line 27) | class VertexViewer:
method __init__ (line 37) | def __init__(self, name: str) -> None:
method h (line 47) | def h(self) -> int:
method w (line 52) | def w(self) -> int:
class AsciiCanvas (line 57) | class AsciiCanvas:
method __init__ (line 62) | def __init__(self, cols: int, lines: int) -> None:
method draw (line 81) | def draw(self) -> str:
method point (line 90) | def point(self, x: int, y: int, char: str) -> None:
method line (line 117) | def line(self, x0: int, y0: int, x1: int, y1: int, char: str) -> None:
method text (line 149) | def text(self, x: int, y: int, text: str) -> None:
method box (line 160) | def box(self, x0: int, y0: int, width: int, height: int) -> None:
class _EdgeViewer (line 193) | class _EdgeViewer:
method __init__ (line 194) | def __init__(self) -> None:
method setpath (line 197) | def setpath(self, pts: list[tuple[float]]) -> None:
function _build_sugiyama_layout (line 201) | def _build_sugiyama_layout(
function draw_ascii (line 247) | def draw_ascii(vertices: Mapping[str, str], edges: Sequence[LangEdge]) -...
FILE: libs/core/langchain_core/runnables/graph_mermaid.py
function draw_mermaid (line 45) | def draw_mermaid(
function _to_safe_id (line 255) | def _to_safe_id(label: str) -> str:
function _generate_mermaid_graph_styles (line 269) | def _generate_mermaid_graph_styles(node_colors: NodeStyles) -> str:
function draw_mermaid_png (line 277) | def draw_mermaid_png(
function _render_mermaid_using_pyppeteer (line 334) | async def _render_mermaid_using_pyppeteer(
function _render_mermaid_using_api (line 405) | def _render_mermaid_using_api(
FILE: libs/core/langchain_core/runnables/graph_png.py
class PngDrawer (line 16) | class PngDrawer:
method __init__ (line 28) | def __init__(
method get_node_label (line 54) | def get_node_label(self, label: str) -> str:
method get_edge_label (line 66) | def get_edge_label(self, label: str) -> str:
method add_node (line 78) | def add_node(self, viz: Any, node: str) -> None:
method add_edge (line 94) | def add_edge(
method draw (line 120) | def draw(self, graph: Graph, output_path: str | None = None) -> bytes ...
method add_nodes (line 156) | def add_nodes(self, viz: Any, graph: Graph) -> None:
method add_subgraph (line 166) | def add_subgraph(
method add_edges (line 192) | def add_edges(self, viz: Any, graph: Graph) -> None:
method update_styles (line 205) | def update_styles(viz: Any, graph: Graph) -> None:
FILE: libs/core/langchain_core/runnables/history.py
class RunnableWithMessageHistory (line 38) | class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[n...
method __init__ (line 249) | def __init__(
method config_specs (line 377) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_input_schema (line 384) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method OutputType (line 407) | def OutputType(self) -> type[Output]:
method get_output_schema (line 411) | def get_output_schema(
method _get_input_messages (line 443) | def _get_input_messages(
method _get_output_messages (line 481) | def _get_output_messages(
method _enter_history (line 512) | def _enter_history(self, value: Any, config: RunnableConfig) -> list[B...
method _aenter_history (line 524) | async def _aenter_history(
method _exit_history (line 538) | def _exit_history(self, run: Run, config: RunnableConfig) -> None:
method _aexit_history (line 555) | async def _aexit_history(self, run: Run, config: RunnableConfig) -> None:
method _merge_configs (line 572) | def _merge_configs(self, *configs: RunnableConfig | None) -> RunnableC...
function _get_parameter_names (line 619) | def _get_parameter_names(callable_: GetSessionHistoryCallable) -> list[s...
FILE: libs/core/langchain_core/runnables/passthrough.py
function identity (line 50) | def identity(x: Other) -> Other:
function aidentity (line 62) | async def aidentity(x: Other) -> Other:
class RunnablePassthrough (line 74) | class RunnablePassthrough(RunnableSerializable[Other, Other]):
method __repr_args__ (line 148) | def __repr_args__(self) -> Any:
method __init__ (line 153) | def __init__(
method is_lc_serializable (line 182) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 187) | def get_lc_namespace(cls) -> list[str]:
method InputType (line 197) | def InputType(self) -> Any:
method OutputType (line 202) | def OutputType(self) -> Any:
method assign (line 207) | def assign(
method invoke (line 226) | def invoke(
method ainvoke (line 236) | async def ainvoke(
method transform (line 253) | def transform(
method atransform (line 284) | async def atransform(
method stream (line 327) | def stream(
method astream (line 336) | async def astream(
class RunnableAssign (line 352) | class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
method __init__ (line 394) | def __init__(self, mapper: RunnableParallel[dict[str, Any]], **kwargs:...
method is_lc_serializable (line 405) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 411) | def get_lc_namespace(cls) -> list[str]:
method get_name (line 420) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method get_input_schema (line 429) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method get_output_schema (line 438) | def get_output_schema(
method config_specs (line 464) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method get_graph (line 468) | def get_graph(self, config: RunnableConfig | None = None) -> Graph:
method _invoke (line 480) | def _invoke(
method invoke (line 501) | def invoke(
method _ainvoke (line 509) | async def _ainvoke(
method ainvoke (line 530) | async def ainvoke(
method _transform (line 538) | def _transform(
method transform (line 585) | def transform(
method _atransform (line 595) | async def _atransform(
method atransform (line 637) | async def atransform(
method stream (line 649) | def stream(
method astream (line 658) | async def astream(
class RunnablePick (line 671) | class RunnablePick(RunnableSerializable[dict[str, Any], Any]):
method __init__ (line 710) | def __init__(self, keys: str | list[str], **kwargs: Any) -> None:
method is_lc_serializable (line 720) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 726) | def get_lc_namespace(cls) -> list[str]:
method get_name (line 735) | def get_name(self, suffix: str | None = None, *, name: str | None = No...
method _pick (line 744) | def _pick(self, value: dict[str, Any]) -> Any:
method invoke (line 757) | def invoke(
method _ainvoke (line 765) | async def _ainvoke(
method ainvoke (line 772) | async def ainvoke(
method _transform (line 780) | def _transform(
method transform (line 790) | def transform(
method _atransform (line 800) | async def _atransform(
method atransform (line 810) | async def atransform(
method stream (line 822) | def stream(
method astream (line 831) | async def astream(
FILE: libs/core/langchain_core/runnables/retry.py
class ExponentialJitterParams (line 35) | class ExponentialJitterParams(TypedDict, total=False):
class RunnableRetry (line 48) | class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore...
method _kwargs_retrying (line 136) | def _kwargs_retrying(self) -> dict[str, Any]:
method _sync_retrying (line 152) | def _sync_retrying(self, **kwargs: Any) -> Retrying:
method _async_retrying (line 155) | def _async_retrying(self, **kwargs: Any) -> AsyncRetrying:
method _patch_config (line 159) | def _patch_config(
method _patch_config_list (line 168) | def _patch_config_list(
method _invoke (line 179) | def _invoke(
method invoke (line 198) | def invoke(
method _ainvoke (line 203) | async def _ainvoke(
method ainvoke (line 222) | async def ainvoke(
method _batch (line 227) | def _batch(
method batch (line 291) | def batch(
method _abatch (line 303) | async def _abatch(
method abatch (line 366) | async def abatch(
FILE: libs/core/langchain_core/runnables/router.py
class RouterInput (line 37) | class RouterInput(TypedDict):
class RouterRunnable (line 46) | class RouterRunnable(RunnableSerializable[RouterInput, Output]):
method config_specs (line 68) | def config_specs(self) -> list[ConfigurableFieldSpec]:
method __init__ (line 73) | def __init__(
method is_lc_serializable (line 92) | def is_lc_serializable(cls) -> bool:
method get_lc_namespace (line 98) | def get_lc_namespace(cls) -> list[str]:
method invoke (line 107) | def invoke(
method ainvoke (line 120) | async def ainvoke(
method batch (line 136) | def batch(
method abatch (line 173) | async def abatch(
method stream (line 209) | def stream(
method astream (line 225) | async def astream(
FILE: libs/core/langchain_core/runnables/schema.py
class EventData (line 13) | class EventData(TypedDict, total=False):
class BaseStreamEvent (line 56) | class BaseStreamEvent(TypedDict):
class StandardStreamEvent (line 164) | class StandardStreamEvent(BaseStreamEvent):
class CustomStreamEvent (line 176) | class CustomStreamEvent(BaseStreamEvent):
FILE: libs/core/langchain_core/runnables/utils.py
function gated_coro (line 49) | async def gated_coro(semaphore: asyncio.Semaphore, coro: Coroutine) -> Any:
function gather_with_concurrency (line 63) | async def gather_with_concurrency(n: int | None, *coros: Coroutine) -> l...
function accepts_run_manager (line 81) | def accepts_run_manager(callable: Callable[..., Any]) -> bool: # noqa: ...
function accepts_config (line 96) | def accepts_config(callable: Callable[..., Any]) -> bool: # noqa: A002
function accepts_context (line 111) | def accepts_context(callable: Callable[..., Any]) -> bool: # noqa: A002
function asyncio_accepts_context (line 126) | def asyncio_accepts_context() -> bool:
function coro_with_context (line 138) | def coro_with_context(
class IsLocalDict (line 158) | class IsLocalDict(ast.NodeVisitor):
method __init__ (line 161) | def __init__(self, name: str, keys: set[str]) -> None:
method visit_Subscript (line 172) | def visit_Subscript(self, node: ast.Subscript) -> None:
method visit_Call (line 189) | def visit_Call(self, node: ast.Call) -> None:
class IsFunctionArgDict (line 208) | class IsFunctionArgDict(ast.NodeVisitor):
method __init__ (line 211) | def __init__(self) -> None:
method visit_Lambda (line 216) | def visit_Lambda(self, node: ast.Lambda) -> None:
method visit_FunctionDef (line 228) | def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
method visit_AsyncFunctionDef (line 240) | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
class NonLocals (line 252) | class NonLocals(ast.NodeVisitor):
method __init__ (line 255) | def __init__(self) -> None:
method visit_Name (line 261) | def visit_Name(self, node: ast.Name) -> None:
method visit_Attribute (line 273) | def visit_Attribute(self, node: ast.Attribute) -> None:
class FunctionNonLocals (line 304) | class FunctionNonLocals(ast.NodeVisitor):
method __init__ (line 307) | def __init__(self) -> None:
method visit_FunctionDef (line 312) | def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
method visit_AsyncFunctionDef (line 323) | def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
method visit_Lambda (line 334) | def visit_Lambda(self, node: ast.Lambda) -> None:
class GetLambdaSource (line 345) | class GetLambdaSource(ast.NodeVisitor):
method __init__ (line 348) | def __init__(self) -> None:
method visit_Lambda (line 354) | def visit_Lambda(self, node: ast.Lambda) -> None:
function get_function_first_arg_dict_keys (line 365) | def get_function_first_arg_dict_keys(func: Callable) -> list[str] | None:
function get_lambda_source (line 384) | def get_lambda_source(func: Callable) -> str | None:
function get_function_nonlocals (line 408) | def get_function_nonlocals(func: Callable) -> list[Any]:
function indent_lines_after_first (line 450) | def indent_lines_after_first(text: str, prefix: str) -> str:
class AddableDict (line 466) | class AddableDict(dict[str, Any]):
method __add__ (line 469) | def __add__(self, other: AddableDict) -> AddableDict:
method __radd__ (line 490) | def __radd__(self, other: AddableDict) -> AddableDict:
class SupportsAdd (line 516) | class SupportsAdd(Protocol[_T_contra, _T_co]):
method __add__ (line 519) | def __add__(self, x: _T_contra, /) -> _T_co:
function add (line 526) | def add(addables: Iterable[Addable]) -> Addable | None:
function aadd (line 541) | async def aadd(addables: AsyncIterable[Addable]) -> Addable | None:
class ConfigurableField (line 556) | class ConfigurableField(NamedTuple):
method __hash__ (line 571) | def __hash__(self) -> int:
class ConfigurableFieldSingleOption (line 575) | class ConfigurableFieldSingleOption(NamedTuple):
method __hash__ (line 592) | def __hash__(self) -> int:
class ConfigurableFieldMultiOption (line 596) | class ConfigurableFieldMultiOption(NamedTuple):
method __hash__ (line 613) | def __hash__(self) -> int:
class ConfigurableFieldSpec (line 622) | class ConfigurableFieldSpec(NamedTuple):
function get_unique_config_specs (line 641) | def get_unique_config_specs(
class _RootEventFilter (line 673) | class _RootEventFilter:
method __init__ (line 674) | def __init__(
method include_event (line 696) | def include_event(self, event: StreamEvent, root_type: str) -> bool:
function is_async_generator (line 728) | def is_async_generator(
function is_async_callable (line 745) | def is_async_callable(
FILE: libs/core/langchain_core/stores.py
class BaseStore (line 26) | class BaseStore(ABC, Generic[K, V]):
method mget (line 81) | def mget(self, keys: Sequence[K]) -> list[V | None]:
method amget (line 92) | async def amget(self, keys: Sequence[K]) -> list[V | None]:
method mset (line 105) | def mset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None:
method amset (line 112) | async def amset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None:
method mdelete (line 121) | def mdelete(self, keys: Sequence[K]) -> None:
method amdelete (line 128) | async def amdelete(self, keys: Sequence[K]) -> None:
method yield_keys (line 137) | def yield_keys(self, *, prefix: str | None = None) -> Iterator[K] | It...
method ayield_keys (line 150) | async def ayield_keys(
class InMemoryBaseStore (line 176) | class InMemoryBaseStore(BaseStore[str, V], Generic[V]):
method __init__ (line 179) | def __init__(self) -> None:
method mget (line 184) | def mget(self, keys: Sequence[str]) -> list[V | None]:
method amget (line 188) | async def amget(self, keys: Sequence[str]) -> list[V | None]:
method mset (line 192) | def mset(self, key_value_pairs: Sequence[tuple[str, V]]) -> None:
method amset (line 197) | async def amset(self, key_value_pairs: Sequence[tuple[str, V]]) -> None:
method mdelete (line 201) | def mdelete(self, keys: Sequence[str]) -> None:
method amdelete (line 207) | async def amdelete(self, keys: Sequence[str]) -> None:
method yield_keys (line 210) | def yield_keys(self, *, prefix: str | None = None) -> Iterator[str]:
method ayield_keys (line 226) | async def ayield_keys(self, *, prefix: str | None = None) -> AsyncIter...
class InMemoryStore (line 244) | class InMemoryStore(InMemoryBaseStore[Any]):
class InMemoryByteStore (line 267) | class InMemoryByteStore(InMemoryBaseStore[bytes]):
class InvalidKeyException (line 290) | class InvalidKeyException(LangChainException):
FILE: libs/core/langchain_core/structured_query.py
class Visitor (line 15) | class Visitor(ABC):
method _validate_func (line 24) | def _validate_func(self, func: Operator | Comparator) -> None:
method visit_operation (line 47) | def visit_operation(self, operation: Operation) -> Any:
method visit_comparison (line 55) | def visit_comparison(self, comparison: Comparison) -> Any:
method visit_structured_query (line 63) | def visit_structured_query(self, structured_query: StructuredQuery) ->...
function _to_snake_case (line 71) | def _to_snake_case(name: str) -> str:
class Expr (line 82) | class Expr(BaseModel):
method accept (line 85) | def accept(self, visitor: Visitor) -> Any:
class Operator (line 99) | class Operator(str, Enum):
class Comparator (line 107) | class Comparator(str, Enum):
class FilterDirective (line 122) | class FilterDirective(Expr, ABC):
class Comparison (line 126) | class Comparison(FilterDirective):
method __init__ (line 138) | def __init__(
class Operation (line 154) | class Operation(FilterDirective):
method __init__ (line 163) | def __init__(
class StructuredQuery (line 176) | class StructuredQuery(Expr):
method __init__ (line 188) | def __init__(
FILE: libs/core/langchain_core/sys_info.py
function _get_sub_deps (line 11) | def _get_sub_deps(packages: Sequence[str]) -> list[str]:
function print_sys_info (line 36) | def print_sys_info(*, additional_pkgs: Sequence[str] = ()) -> None:
FILE: libs/core/langchain_core/tools/__init__.py
function __getattr__ (line 87) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 94) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/tools/base.py
class SchemaAnnotationError (line 89) | class SchemaAnnotationError(TypeError):
function _is_annotated_type (line 93) | def _is_annotated_type(typ: type[Any]) -> bool:
function _get_annotation_description (line 105) | def _get_annotation_description(arg_type: type) -> str | None:
function _get_filtered_args (line 126) | def _get_filtered_args(
function _parse_python_function_docstring (line 155) | def _parse_python_function_docstring(
function _validate_docstring_args_against_annotations (line 178) | def _validate_docstring_args_against_annotations(
function _infer_arg_descriptions (line 196) | def _infer_arg_descriptions(
function _is_pydantic_annotation (line 230) | def _is_pydantic_annotation(annotation: Any, pydantic_version: str = "v2...
function _function_annotations_are_pydantic_v1 (line 247) | def _function_annotations_are_pydantic_v1(
class _SchemaConfig (line 279) | class _SchemaConfig:
function create_schema_from_function (line 289) | def create_schema_from_function(
class ToolException (line 390) | class ToolException(Exception): # noqa: N818
class BaseTool (line 405) | class BaseTool(RunnableSerializable[str | dict | ToolCall, Any]):
method __init_subclass__ (line 413) | def __init_subclass__(cls, **kwargs: Any) -> None:
method __init__ (line 533) | def __init__(self, **kwargs: Any) -> None:
method is_single_input (line 558) | def is_single_input(self) -> bool:
method args (line 568) | def args(self) -> dict:
method tool_call_schema (line 587) | def tool_call_schema(self) -> ArgsSchema:
method _injected_args_keys (line 612) | def _injected_args_keys(self) -> frozenset[str]:
method get_input_schema (line 619) | def get_input_schema(self, config: RunnableConfig | None = None) -> ty...
method invoke (line 635) | def invoke(
method ainvoke (line 645) | async def ainvoke(
method _parse_input (line 656) | def _parse_input(
method _run (line 778) | def _run(self, *args: Any, **kwargs: Any) -> Any:
method _arun (line 788) | async def _arun(self, *args: Any, **kwargs: Any) -> Any:
method _filter_injected_args (line 803) | def _filter_injected_args(self, tool_input: dict) -> dict:
method _to_args_and_kwargs (line 839) | def _to_args_and_kwargs(
method run (line 878) | def run(
method arun (line 1006) | async def arun(
function _is_tool_call (line 1138) | def _is_tool_call(x: Any) -> bool:
function _handle_validation_error (line 1150) | def _handle_validation_error(
function _handle_tool_error (line 1182) | def _handle_tool_error(
function _prep_run_args (line 1214) | def _prep_run_args(
function _format_output (line 1251) | def _format_output(
function _is_message_content_type (line 1283) | def _is_message_content_type(obj: Any) -> bool:
function _is_message_content_block (line 1299) | def _is_message_content_block(obj: Any) -> bool:
function _stringify (line 1317) | def _stringify(content: Any) -> str:
function _get_type_hints (line 1332) | def _get_type_hints(func: Callable) -> dict[str, type] | None:
function _get_runnable_config_param (line 1349) | def _get_runnable_config_param(func: Callable) -> str | None:
class InjectedToolArg (line 1367) | class InjectedToolArg:
class _DirectlyInjectedToolArg (line 1375) | class _DirectlyInjectedToolArg:
class InjectedToolCallId (line 1397) | class InjectedToolCallId(InjectedToolArg):
function _is_directly_injected_arg_type (line 1423) | def _is_directly_injected_arg_type(type_: Any) -> bool:
function _is_injected_arg_type (line 1443) | def _is_injected_arg_type(
function get_all_basemodel_annotations (line 1471) | def get_all_basemodel_annotations(
function _replace_type_vars (line 1541) | def _replace_type_vars(
class BaseToolkit (line 1573) | class BaseToolkit(BaseModel, ABC):
method get_tools (line 1581) | def get_tools(self) -> list[BaseTool]:
FILE: libs/core/langchain_core/tools/convert.py
function tool (line 17) | def tool(
function tool (line 31) | def tool(
function tool (line 47) | def tool(
function tool (line 62) | def tool(
function tool (line 76) | def tool(
function _get_description_from_runnable (line 393) | def _get_description_from_runnable(runnable: Runnable) -> str:
function _get_schema_from_runnable_and_arg_types (line 399) | def _get_schema_from_runnable_and_arg_types(
function convert_runnable_to_tool (line 419) | def convert_runnable_to_tool(
FILE: libs/core/langchain_core/tools/render.py
function render_text_description (line 13) | def render_text_description(tools: list[BaseTool]) -> str:
function render_text_description_and_args (line 41) | def render_text_description_and_args(tools: list[BaseTool]) -> str:
FILE: libs/core/langchain_core/tools/retriever.py
class RetrieverInput (line 25) | class RetrieverInput(BaseModel):
function create_retriever_tool (line 31) | def create_retriever_tool(
FILE: libs/core/langchain_core/tools/simple.py
class Tool (line 31) | class Tool(BaseTool):
method ainvoke (line 45) | async def ainvoke(
method args (line 60) | def args(self) -> dict:
method _to_args_and_kwargs (line 72) | def _to_args_and_kwargs(
method _run (line 99) | def _run(
method _arun (line 126) | async def _arun(
method __init__ (line 158) | def __init__(
method from_function (line 165) | def from_function(
FILE: libs/core/langchain_core/tools/structured.py
class StructuredTool (line 40) | class StructuredTool(BaseTool):
method ainvoke (line 60) | async def ainvoke(
method _run (line 74) | def _run(
method _arun (line 101) | async def _arun(
method from_function (line 133) | def from_function(
method _injected_args_keys (line 255) | def _injected_args_keys(self) -> frozenset[str]:
function _filter_schema_args (line 266) | def _filter_schema_args(func: Callable) -> list[str]:
FILE: libs/core/langchain_core/tracers/__init__.py
function __getattr__ (line 42) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 49) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/tracers/_compat.py
function run_to_dict (line 24) | def run_to_dict(run: Run, **kwargs: Any) -> dict[str, Any]:
function run_copy (line 39) | def run_copy(run: Run, **kwargs: Any) -> Run:
function run_construct (line 54) | def run_construct(**kwargs: Any) -> Run:
function pydantic_to_dict (line 68) | def pydantic_to_dict(obj: Any, **kwargs: Any) -> dict[str, Any]:
function pydantic_copy (line 83) | def pydantic_copy(obj: T, **kwargs: Any) -> T:
FILE: libs/core/langchain_core/tracers/_streaming.py
class _StreamingCallbackHandler (line 12) | class _StreamingCallbackHandler(typing.Protocol[T]):
method tap_output_aiter (line 22) | def tap_output_aiter(
method tap_output_iter (line 27) | def tap_output_iter(self, run_id: UUID, output: Iterator[T]) -> Iterat...
FILE: libs/core/langchain_core/tracers/base.py
class BaseTracer (line 33) | class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
method _persist_run (line 37) | def _persist_run(self, run: Run) -> None:
method _start_trace (line 40) | def _start_trace(self, run: Run) -> None:
method _end_trace (line 45) | def _end_trace(self, run: Run) -> None:
method on_chat_model_start (line 52) | def on_chat_model_start(
method on_llm_start (line 99) | def on_llm_start(
method on_llm_new_token (line 141) | def on_llm_new_token(
method on_retry (line 176) | def on_retry(
method on_llm_end (line 199) | def on_llm_end(self, response: LLMResult, *, run_id: UUID, **kwargs: A...
method on_llm_error (line 225) | def on_llm_error(
method on_chain_start (line 252) | def on_chain_start(
method on_chain_end (line 297) | def on_chain_end(
method on_chain_error (line 326) | def on_chain_error(
method on_tool_start (line 354) | def on_tool_start(
method on_tool_end (line 399) | def on_tool_end(self, output: Any, *, run_id: UUID, **kwargs: Any) -> ...
method on_tool_error (line 419) | def on_tool_error(
method on_retriever_start (line 444) | def on_retriever_start(
method on_retriever_error (line 486) | def on_retriever_error(
method on_retriever_end (line 512) | def on_retriever_end(
method __deepcopy__ (line 533) | def __deepcopy__(self, memo: dict) -> BaseTracer:
method __copy__ (line 537) | def __copy__(self) -> BaseTracer:
class AsyncBaseTracer (line 542) | class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
method _persist_run (line 547) | async def _persist_run(self, run: Run) -> None:
method _start_trace (line 551) | async def _start_trace(self, run: Run) -> None:
method _end_trace (line 562) | async def _end_trace(self, run: Run) -> None:
method on_chat_model_start (line 574) | async def on_chat_model_start(
method on_llm_start (line 604) | async def on_llm_start(
method on_llm_new_token (line 628) | async def on_llm_new_token(
method on_retry (line 646) | async def on_retry(
method on_llm_end (line 659) | async def on_llm_end(
method on_llm_error (line 684) | async def on_llm_error(
method on_chain_start (line 701) | async def on_chain_start(
method on_chain_end (line 729) | async def on_chain_end(
method on_chain_error (line 746) | async def on_chain_error(
method on_tool_start (line 763) | async def on_tool_start(
method on_tool_end (line 790) | async def on_tool_end(
method on_tool_error (line 805) | async def on_tool_error(
method on_retriever_start (line 822) | async def on_retriever_start(
method on_retriever_error (line 850) | async def on_retriever_error(
method on_retriever_end (line 870) | async def on_retriever_end(
method _on_run_create (line 886) | async def _on_run_create(self, run: Run) -> None:
method _on_run_update (line 889) | async def _on_run_update(self, run: Run) -> None:
method _on_llm_start (line 892) | async def _on_llm_start(self, run: Run) -> None:
method _on_llm_end (line 895) | async def _on_llm_end(self, run: Run) -> None:
method _on_llm_error (line 898) | async def _on_llm_error(self, run: Run) -> None:
method _on_llm_new_token (line 901) | async def _on_llm_new_token(
method _on_chain_start (line 909) | async def _on_chain_start(self, run: Run) -> None:
method _on_chain_end (line 912) | async def _on_chain_end(self, run: Run) -> None:
method _on_chain_error (line 915) | async def _on_chain_error(self, run: Run) -> None:
method _on_tool_start (line 918) | async def _on_tool_start(self, run: Run) -> None:
method _on_tool_end (line 921) | async def _on_tool_end(self, run: Run) -> None:
method _on_tool_error (line 924) | async def _on_tool_error(self, run: Run) -> None:
method _on_chat_model_start (line 927) | async def _on_chat_model_start(self, run: Run) -> None:
method _on_retriever_start (line 930) | async def _on_retriever_start(self, run: Run) -> None:
method _on_retriever_end (line 933) | async def _on_retriever_end(self, run: Run) -> None:
method _on_retriever_error (line 936) | async def _on_retriever_error(self, run: Run) -> None:
FILE: libs/core/langchain_core/tracers/context.py
function tracing_v2_enabled (line 40) | def tracing_v2_enabled(
function collect_runs (line 86) | def collect_runs() -> Generator[RunCollectorCallbackHandler, None, None]:
function _get_trace_callbacks (line 105) | def _get_trace_callbacks(
function _tracing_v2_is_enabled (line 132) | def _tracing_v2_is_enabled() -> bool | Literal["local"]:
function _get_tracer_project (line 138) | def _get_tracer_project() -> str:
function register_configure_hook (line 171) | def register_configure_hook(
FILE: libs/core/langchain_core/tracers/core.py
class _TracerCore (line 40) | class _TracerCore(ABC):
method __init__ (line 48) | def __init__(
method _persist_run (line 87) | def _persist_run(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _add_child_run (line 91) | def _add_child_run(
method _get_stacktrace (line 99) | def _get_stacktrace(error: BaseException) -> str:
method _start_trace (line 108) | def _start_trace(self, run: Run) -> Coroutine[Any, Any, None] | None: ...
method _get_run (line 132) | def _get_run(self, run_id: UUID, run_type: str | set[str] | None = Non...
method _create_chat_model_run (line 151) | def _create_chat_model_run(
method _create_llm_run (line 195) | def _create_llm_run(
method _llm_run_with_token_event (line 224) | def _llm_run_with_token_event(
method _llm_run_with_retry_event (line 246) | def _llm_run_with_retry_event(
method _complete_llm_run (line 275) | def _complete_llm_run(self, response: LLMResult, run_id: UUID) -> Run:
method _errored_llm_run (line 305) | def _errored_llm_run(
method _create_chain_run (line 329) | def _create_chain_run(
method _get_chain_inputs (line 359) | def _get_chain_inputs(self, inputs: Any) -> Any:
method _get_chain_outputs (line 370) | def _get_chain_outputs(self, outputs: Any) -> Any:
method _complete_chain_run (line 381) | def _complete_chain_run(
method _errored_chain_run (line 401) | def _errored_chain_run(
method _create_tool_run (line 415) | def _create_tool_run(
method _complete_tool_run (line 455) | def _complete_tool_run(
method _errored_tool_run (line 470) | def _errored_tool_run(
method _create_retrieval_run (line 482) | def _create_retrieval_run(
method _complete_retrieval_run (line 511) | def _complete_retrieval_run(
method _errored_retrieval_run (line 528) | def _errored_retrieval_run(
method __deepcopy__ (line 539) | def __deepcopy__(self, memo: dict) -> _TracerCore:
method __copy__ (line 543) | def __copy__(self) -> _TracerCore:
method _end_trace (line 547) | def _end_trace(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_run_create (line 556) | def _on_run_create(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_run_update (line 565) | def _on_run_update(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_llm_start (line 574) | def _on_llm_start(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_llm_new_token (line 583) | def _on_llm_new_token(
method _on_llm_end (line 599) | def _on_llm_end(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_llm_error (line 608) | def _on_llm_error(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_chain_start (line 617) | def _on_chain_start(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_chain_end (line 626) | def _on_chain_end(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_chain_error (line 635) | def _on_chain_error(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_tool_start (line 644) | def _on_tool_start(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_tool_end (line 653) | def _on_tool_end(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_tool_error (line 662) | def _on_tool_error(self, run: Run) -> Coroutine[Any, Any, None] | None:
method _on_chat_model_start (line 671) | def _on_chat_model_start(self, run: Run) -> Coroutine[Any, Any, None] ...
method _on_retriever_start (line 680) | def _on_retriever_start(self, run: Run) -> Coroutine[Any, Any, None] |...
method _on_retriever_end (line 689) | def _on_retriever_end(self, run: Run) -> Coroutine[Any, Any, None] | N...
method _on_retriever_error (line 698) | def _on_retriever_error(self, run: Run) -> Coroutine[Any, Any, None] |...
FILE: libs/core/langchain_core/tracers/evaluation.py
function wait_for_all_evaluators (line 31) | def wait_for_all_evaluators() -> None:
class EvaluatorCallbackHandler (line 38) | class EvaluatorCallbackHandler(BaseTracer):
method __init__ (line 72) | def __init__(
method _evaluate_in_project (line 118) | def _evaluate_in_project(self, run: Run, evaluator: langsmith.RunEvalu...
method _select_eval_results (line 163) | def _select_eval_results(
method _log_evaluation_feedback (line 178) | def _log_evaluation_feedback(
method _persist_run (line 205) | def _persist_run(self, run: Run) -> None:
method wait_for_futures (line 224) | def wait_for_futures(self) -> None:
FILE: libs/core/langchain_core/tracers/event_stream.py
class RunInfo (line 58) | class RunInfo(TypedDict):
function _assign_name (line 86) | def _assign_name(name: str | None, serialized: dict[str, Any] | None) ->...
class _AstreamEventsCallbackHandler (line 101) | class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCall...
method __init__ (line 104) | def __init__(
method _get_parent_ids (line 148) | def _get_parent_ids(self, run_id: UUID) -> list[str]:
method _send (line 167) | def _send(self, event: StreamEvent, event_type: str) -> None:
method __aiter__ (line 172) | def __aiter__(self) -> AsyncIterator[Any]:
method tap_output_aiter (line 180) | async def tap_output_aiter(
method tap_output_iter (line 235) | def tap_output_iter(self, run_id: UUID, output: Iterator[T]) -> Iterat...
method _write_run_start_info (line 285) | def _write_run_start_info(
method on_chat_model_start (line 319) | async def on_chat_model_start(
method on_llm_start (line 361) | async def on_llm_start(
method on_custom_event (line 405) | async def on_custom_event(
method on_llm_new_token (line 428) | async def on_llm_new_token(
method on_llm_end (line 489) | async def on_llm_end(
method on_chain_start (line 549) | async def on_chain_start(
method on_chain_end (line 598) | async def on_chain_end(
method _get_tool_run_info_with_inputs (line 632) | def _get_tool_run_info_with_inputs(self, run_id: UUID) -> tuple[RunInf...
method on_tool_start (line 655) | async def on_tool_start(
method on_tool_error (line 698) | async def on_tool_error(
method on_tool_end (line 731) | async def on_tool_end(self, output: Any, *, run_id: UUID, **kwargs: An...
method on_retriever_start (line 752) | async def on_retriever_start(
method on_retriever_end (line 796) | async def on_retriever_end(
method __deepcopy__ (line 818) | def __deepcopy__(self, memo: dict) -> _AstreamEventsCallbackHandler:
method __copy__ (line 822) | def __copy__(self) -> _AstreamEventsCallbackHandler:
function _astream_events_implementation_v1 (line 827) | async def _astream_events_implementation_v1(
function _astream_events_implementation_v2 (line 1003) | async def _astream_events_implementation_v2(
FILE: libs/core/langchain_core/tracers/langchain.py
function log_error_once (line 38) | def log_error_once(method: str, exception: Exception) -> None:
function wait_for_all_tracers (line 51) | def wait_for_all_tracers() -> None:
function get_client (line 57) | def get_client() -> Client:
function _get_executor (line 66) | def _get_executor() -> ThreadPoolExecutor:
function _get_usage_metadata_from_generations (line 74) | def _get_usage_metadata_from_generations(
function _get_usage_metadata_from_message (line 104) | def _get_usage_metadata_from_message(message: Any) -> UsageMetadata | None:
class LangChainTracer (line 116) | class LangChainTracer(BaseTracer):
method __init__ (line 121) | def __init__(
method _start_trace (line 154) | def _start_trace(self, run: Run) -> None:
method on_chat_model_start (line 169) | def on_chat_model_start(
method _persist_run (line 215) | def _persist_run(self, run: Run) -> None:
method get_run_url (line 225) | def get_run_url(self) -> str:
method _get_tags (line 253) | def _get_tags(self, run: Run) -> list[str]:
method _persist_run_single (line 259) | def _persist_run_single(self, run: Run) -> None:
method _update_run_single (line 275) | def _update_run_single(run: Run) -> None:
method _on_llm_start (line 286) | def _on_llm_start(self, run: Run) -> None:
method _llm_run_with_token_event (line 293) | def _llm_run_with_token_event(
method _on_chat_model_start (line 313) | def _on_chat_model_start(self, run: Run) -> None:
method _on_llm_end (line 325) | def _on_llm_end(self, run: Run) -> None:
method _on_llm_error (line 338) | def _on_llm_error(self, run: Run) -> None:
method _on_chain_start (line 342) | def _on_chain_start(self, run: Run) -> None:
method _on_chain_end (line 351) | def _on_chain_end(self, run: Run) -> None:
method _on_chain_error (line 360) | def _on_chain_error(self, run: Run) -> None:
method _on_tool_start (line 369) | def _on_tool_start(self, run: Run) -> None:
method _on_tool_end (line 375) | def _on_tool_end(self, run: Run) -> None:
method _on_tool_error (line 379) | def _on_tool_error(self, run: Run) -> None:
method _on_retriever_start (line 383) | def _on_retriever_start(self, run: Run) -> None:
method _on_retriever_end (line 389) | def _on_retriever_end(self, run: Run) -> None:
method _on_retriever_error (line 393) | def _on_retriever_error(self, run: Run) -> None:
method wait_for_futures (line 397) | def wait_for_futures(self) -> None:
FILE: libs/core/langchain_core/tracers/log_stream.py
class LogEntry (line 40) | class LogEntry(TypedDict):
class RunState (line 83) | class RunState(TypedDict):
class RunLogPatch (line 115) | class RunLogPatch:
method __init__ (line 128) | def __init__(self, *ops: dict[str, Any]) -> None:
method __add__ (line 136) | def __add__(self, other: RunLogPatch | Any) -> RunLog:
method __repr__ (line 157) | def __repr__(self) -> str:
method __eq__ (line 162) | def __eq__(self, other: object) -> bool:
class RunLog (line 168) | class RunLog(RunLogPatch):
method __init__ (line 174) | def __init__(self, *ops: dict[str, Any], state: RunState) -> None:
method __add__ (line 184) | def __add__(self, other: RunLogPatch | Any) -> RunLog:
method __repr__ (line 205) | def __repr__(self) -> str:
method __eq__ (line 209) | def __eq__(self, other: object) -> bool:
class LogStreamCallbackHandler (line 232) | class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
method __init__ (line 235) | def __init__(
method __aiter__ (line 302) | def __aiter__(self) -> AsyncIterator[RunLogPatch]:
method send (line 310) | def send(self, *ops: dict[str, Any]) -> bool:
method tap_output_aiter (line 326) | async def tap_output_aiter(
method tap_output_iter (line 359) | def tap_output_iter(self, run_id: UUID, output: Iterator[T]) -> Iterat...
method include_run (line 390) | def include_run(self, run: Run) -> bool:
method _persist_run (line 429) | def _persist_run(self, run: Run) -> None:
method _on_run_create (line 434) | def _on_run_create(self, run: Run) -> None:
method _on_run_update (line 491) | def _on_run_update(self, run: Run) -> None:
method _on_llm_new_token (line 537) | def _on_llm_new_token(
function _get_standardized_inputs (line 565) | def _get_standardized_inputs(
function _get_standardized_outputs (line 606) | def _get_standardized_outputs(
function _astream_log_implementation (line 639) | def _astream_log_implementation(
function _astream_log_implementation (line 652) | def _astream_log_implementation(
function _astream_log_implementation (line 664) | async def _astream_log_implementation(
FILE: libs/core/langchain_core/tracers/memory_stream.py
class _SendStream (line 19) | class _SendStream(Generic[T]):
method __init__ (line 20) | def __init__(
method send (line 38) | async def send(self, item: T) -> None:
method send_nowait (line 48) | def send_nowait(self, item: T) -> None:
method aclose (line 66) | async def aclose(self) -> None:
method close (line 70) | def close(self) -> None:
class _ReceiveStream (line 86) | class _ReceiveStream(Generic[T]):
method __init__ (line 87) | def __init__(self, queue: Queue, done: object) -> None:
method __aiter__ (line 97) | async def __aiter__(self) -> AsyncIterator[T]:
class _MemoryStream (line 106) | class _MemoryStream(Generic[T]):
method __init__ (line 119) | def __init__(self, loop: AbstractEventLoop) -> None:
method get_send_stream (line 132) | def get_send_stream(self) -> _SendStream[T]:
method get_receive_stream (line 142) | def get_receive_stream(self) -> _ReceiveStream[T]:
FILE: libs/core/langchain_core/tracers/root_listeners.py
class RootListenersTracer (line 23) | class RootListenersTracer(BaseTracer):
method __init__ (line 29) | def __init__(
method _persist_run (line 53) | def _persist_run(self, run: Run) -> None:
method _on_run_create (line 58) | def _on_run_create(self, run: Run) -> None:
method _on_run_update (line 67) | def _on_run_update(self, run: Run) -> None:
class AsyncRootListenersTracer (line 78) | class AsyncRootListenersTracer(AsyncBaseTracer):
method __init__ (line 84) | def __init__(
method _persist_run (line 108) | async def _persist_run(self, run: Run) -> None:
method _on_run_create (line 113) | async def _on_run_create(self, run: Run) -> None:
method _on_run_update (line 122) | async def _on_run_update(self, run: Run) -> None:
FILE: libs/core/langchain_core/tracers/run_collector.py
class RunCollectorCallbackHandler (line 11) | class RunCollectorCallbackHandler(BaseTracer):
method __init__ (line 19) | def __init__(self, example_id: UUID | str | None = None, **kwargs: Any...
method _persist_run (line 32) | def _persist_run(self, run: Run) -> None:
FILE: libs/core/langchain_core/tracers/stdout.py
function try_json_stringify (line 14) | def try_json_stringify(obj: Any, fallback: str) -> str:
function elapsed (line 30) | def elapsed(run: Any) -> str:
class FunctionCallbackHandler (line 48) | class FunctionCallbackHandler(BaseTracer):
method __init__ (line 57) | def __init__(self, function: Callable[[str], None], **kwargs: Any) -> ...
method _persist_run (line 66) | def _persist_run(self, run: Run) -> None:
method get_parents (line 69) | def get_parents(self, run: Run) -> list[Run]:
method get_breadcrumbs (line 89) | def get_breadcrumbs(self, run: Run) -> str:
method _on_chain_start (line 105) | def _on_chain_start(self, run: Run) -> None:
method _on_chain_end (line 114) | def _on_chain_end(self, run: Run) -> None:
method _on_chain_error (line 125) | def _on_chain_error(self, run: Run) -> None:
method _on_llm_start (line 136) | def _on_llm_start(self, run: Run) -> None:
method _on_llm_end (line 149) | def _on_llm_end(self, run: Run) -> None:
method _on_llm_error (line 159) | def _on_llm_error(self, run: Run) -> None:
method _on_tool_start (line 169) | def _on_tool_start(self, run: Run) -> None:
method _on_tool_end (line 177) | def _on_tool_end(self, run: Run) -> None:
method _on_tool_error (line 188) | def _on_tool_error(self, run: Run) -> None:
class ConsoleCallbackHandler (line 198) | class ConsoleCallbackHandler(FunctionCallbackHandler):
method __init__ (line 203) | def __init__(self, **kwargs: Any) -> None:
FILE: libs/core/langchain_core/utils/__init__.py
function __getattr__ (line 103) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 110) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/utils/_merge.py
function merge_dicts (line 6) | def merge_dicts(left: dict[str, Any], *others: dict[str, Any]) -> dict[s...
function merge_lists (line 89) | def merge_lists(left: list | None, *others: list | None) -> list | None:
function merge_obj (line 170) | def merge_obj(left: Any, right: Any) -> Any:
FILE: libs/core/langchain_core/utils/aiter.py
function py_anext (line 38) | def py_anext(
class NoLock (line 87) | class NoLock:
method __aenter__ (line 90) | async def __aenter__(self) -> None:
method __aexit__ (line 93) | async def __aexit__(
function tee_peer (line 103) | async def tee_peer(
class Tee (line 161) | class Tee(Generic[T]):
method __init__ (line 202) | def __init__(
method __len__ (line 229) | def __len__(self) -> int:
method __getitem__ (line 234) | def __getitem__(self, item: int) -> AsyncIterator[T]: ...
method __getitem__ (line 237) | def __getitem__(self, item: slice) -> tuple[AsyncIterator[T], ...]: ...
method __getitem__ (line 239) | def __getitem__(
method __iter__ (line 245) | def __iter__(self) -> Iterator[AsyncIterator[T]]:
method __aenter__ (line 253) | async def __aenter__(self) -> "Tee[T]":
method __aexit__ (line 257) | async def __aexit__(
method aclose (line 271) | async def aclose(self) -> None:
class aclosing (line 280) | class aclosing(AbstractAsyncContextManager): # noqa: N801
method __init__ (line 302) | def __init__(self, thing: AsyncGenerator[Any, Any] | AsyncIterator[Any...
method __aenter__ (line 311) | async def __aenter__(self) -> AsyncGenerator[Any, Any] | AsyncIterator...
method __aexit__ (line 315) | async def __aexit__(
function abatch_iterate (line 325) | async def abatch_iterate(
FILE: libs/core/langchain_core/utils/env.py
function env_var_is_set (line 9) | def env_var_is_set(env_var: str) -> bool:
function get_from_dict_or_env (line 26) | def get_from_dict_or_env(
function get_from_env (line 60) | def get_from_env(key: str, env_key: str, default: str | None = None) -> ...
FILE: libs/core/langchain_core/utils/formatting.py
class StrictFormatter (line 8) | class StrictFormatter(Formatter):
method vformat (line 23) | def vformat(
method validate_input_variables (line 50) | def validate_input_variables(
FILE: libs/core/langchain_core/utils/function_calling.py
class FunctionDescription (line 66) | class FunctionDescription(TypedDict):
class ToolDescription (line 79) | class ToolDescription(TypedDict):
function _rm_titles (line 89) | def _rm_titles(kv: dict, prev_key: str = "") -> dict:
function _convert_json_schema_to_openai_function (line 123) | def _convert_json_schema_to_openai_function(
function _convert_pydantic_to_openai_function (line 159) | def _convert_pydantic_to_openai_function(
function _get_python_function_name (line 209) | def _get_python_function_name(function: Callable) -> str:
function _convert_python_function_to_openai_function (line 214) | def _convert_python_function_to_openai_function(
function _convert_typed_dict_to_openai_function (line 245) | def _convert_typed_dict_to_openai_function(typed_dict: type) -> Function...
function _convert_any_typed_dicts_to_pydantic (line 258) | def _convert_any_typed_dicts_to_pydantic(
function _format_tool_to_openai_function (line 328) | def _format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
function convert_to_openai_function (line 375) | def convert_to_openai_function(
function convert_to_openai_tool (line 515) | def convert_to_openai_tool(
function convert_to_json_schema (line 577) | def convert_to_json_schema(
function tool_example_to_messages (line 621) | def tool_example_to_messages(
function _parse_google_docstring (line 728) | def _parse_google_docstring(
function _py_38_safe_origin (line 798) | def _py_38_safe_origin(origin: type) -> type:
function _recursive_set_additional_properties_false (line 802) | def _recursive_set_additional_properties_false(
FILE: libs/core/langchain_core/utils/html.py
function find_all_links (line 46) | def find_all_links(
function extract_sub_links (line 62) | def extract_sub_links(
FILE: libs/core/langchain_core/utils/image.py
function __getattr__ (line 6) | def __getattr__(name: str) -> Any:
FILE: libs/core/langchain_core/utils/input.py
function get_color_mapping (line 14) | def get_color_mapping(
function get_colored_text (line 38) | def get_colored_text(text: str, color: str) -> str:
function get_bolded_text (line 52) | def get_bolded_text(text: str) -> str:
function print_text (line 64) | def print_text(
FILE: libs/core/langchain_core/utils/interactive_env.py
function is_interactive_env (line 6) | def is_interactive_env() -> bool:
FILE: libs/core/langchain_core/utils/iter.py
class NoLock (line 19) | class NoLock:
method __enter__ (line 22) | def __enter__(self) -> None:
method __exit__ (line 25) | def __exit__(
function tee_peer (line 35) | def tee_peer(
class Tee (line 91) | class Tee(Generic[T]):
method __init__ (line 129) | def __init__(
method __len__ (line 156) | def __len__(self) -> int:
method __getitem__ (line 161) | def __getitem__(self, item: int) -> Iterator[T]: ...
method __getitem__ (line 164) | def __getitem__(self, item: slice) -> tuple[Iterator[T], ...]: ...
method __getitem__ (line 166) | def __getitem__(self, item: int | slice) -> Iterator[T] | tuple[Iterat...
method __iter__ (line 170) | def __iter__(self) -> Iterator[Iterator[T]]:
method __enter__ (line 178) | def __enter__(self) -> "Tee[T]":
method __exit__ (line 182) | def __exit__(
method close (line 196) | def close(self) -> None:
function batch_iterate (line 206) | def batch_iterate(size: int | None, iterable: Iterable[T]) -> Iterator[l...
FILE: libs/core/langchain_core/utils/json.py
function _replace_new_line (line 15) | def _replace_new_line(match: re.Match[str]) -> str:
function _custom_parser (line 33) | def _custom_parser(multiline_string: str | bytes | bytearray) -> str:
function parse_partial_json (line 58) | def parse_partial_json(s: str, *, strict: bool = False) -> Any:
function parse_json_markdown (line 142) | def parse_json_markdown(
function _parse_json (line 169) | def _parse_json(
function parse_and_check_json_markdown (line 194) | def parse_and_check_json_markdown(text: str, expected_keys: list[str]) -...
FILE: libs/core/langchain_core/utils/json_schema.py
function _retrieve_ref (line 12) | def _retrieve_ref(path: str, schema: dict) -> list | dict:
function _process_dict_properties (line 58) | def _process_dict_properties(
function _dereference_refs_helper (line 83) | def _dereference_refs_helper(
function dereference_refs (line 188) | def dereference_refs(
FILE: libs/core/langchain_core/utils/mustache.py
class ChevronError (line 32) | class ChevronError(SyntaxError):
function grab_literal (line 41) | def grab_literal(template: str, l_del: str) -> tuple[str, str]:
function l_sa_check (line 66) | def l_sa_check(
function r_sa_check (line 92) | def r_sa_check(
function parse_tag (line 118) | def parse_tag(template: str, l_del: str, r_del: str) -> tuple[tuple[str,...
function tokenize (line 199) | def tokenize(
function _html_escape (line 330) | def _html_escape(string: str) -> str:
function _get_key (line 345) | def _get_key(
function _get_partial (line 445) | def _get_partial(name: str, partials_dict: Mapping[str, str]) -> str:
function render (line 466) | def render(
FILE: libs/core/langchain_core/utils/pydantic.py
function get_pydantic_major_version (line 55) | def get_pydantic_major_version() -> int:
function is_pydantic_v1_subclass (line 78) | def is_pydantic_v1_subclass(cls: type) -> bool:
function is_pydantic_v2_subclass (line 87) | def is_pydantic_v2_subclass(cls: type) -> bool:
function is_basemodel_subclass (line 96) | def is_basemodel_subclass(cls: type) -> bool:
function is_basemodel_instance (line 114) | def is_basemodel_instance(obj: Any) -> bool:
function pre_init (line 129) | def pre_init(func: Callable) -> Any:
class _IgnoreUnserializable (line 189) | class _IgnoreUnserializable(GenerateJsonSchema):
method handle_invalid_for_json_schema (line 196) | def handle_invalid_for_json_schema(
function _create_subset_model_v1 (line 202) | def _create_subset_model_v1(
function _create_subset_model_v2 (line 231) | def _create_subset_model_v2(
function _create_subset_model (line 281) | def _create_subset_model(
function get_fields (line 312) | def get_fields(model: type[BaseModel]) -> dict[str, FieldInfoV2]: ...
function get_fields (line 316) | def get_fields(model: BaseModel) -> dict[str, FieldInfoV2]: ...
function get_fields (line 320) | def get_fields(model: type[BaseModelV1]) -> dict[str, ModelField]: ...
function get_fields (line 324) | def get_fields(model: BaseModelV1) -> dict[str, ModelField]: ...
function get_fields (line 327) | def get_fields(
function _create_root_model (line 355) | def _create_root_model(
function _create_root_model_cached (line 417) | def _create_root_model_cached(
function _create_model_cached (line 430) | def _create_model_cached(
function create_model (line 442) | def create_model(
function _remap_field_definitions (line 487) | def _remap_field_definitions(field_definitions: dict[str, Any]) -> dict[...
function create_model_v2 (line 515) | def create_model_v2(
FILE: libs/core/langchain_core/utils/strings.py
function stringify_value (line 7) | def stringify_value(val: Any) -> str:
function stringify_dict (line 25) | def stringify_dict(data: dict) -> str:
function comma_list (line 37) | def comma_list(items: Iterable[Any]) -> str:
function sanitize_for_postgres (line 49) | def sanitize_for_postgres(text: str, replacement: str = "") -> str:
FILE: libs/core/langchain_core/utils/usage.py
function _dict_int_op (line 6) | def _dict_int_op(
FILE: libs/core/langchain_core/utils/utils.py
function xor_args (line 24) | def xor_args(*arg_groups: tuple[str, ...]) -> Callable:
function raise_for_status_with_text (line 58) | def raise_for_status_with_text(response: Response) -> None:
function mock_now (line 74) | def mock_now(dt_value: datetime.datetime) -> Iterator[type]:
function guard_import (line 116) | def guard_import(
function check_package_version (line 146) | def check_package_version(
function get_pydantic_field_names (line 193) | def get_pydantic_field_names(pydantic_cls: Any) -> set[str]:
function _build_model_kwargs (line 216) | def _build_model_kwargs(
function build_extra_kwargs (line 262) | def build_extra_kwargs(
function convert_to_secret_str (line 313) | def convert_to_secret_str(value: SecretStr | str) -> SecretStr:
class _NoDefaultType (line 327) | class _NoDefaultType:
function from_env (line 335) | def from_env(key: str, /) -> Callable[[], str]: ...
function from_env (line 339) | def from_env(key: str, /, *, default: str) -> Callable[[], str]: ...
function from_env (line 343) | def from_env(key: Sequence[str], /, *, default: str) -> Callable[[], str...
function from_env (line 347) | def from_env(key: str, /, *, error_message: str) -> Callable[[], str]: ...
function from_env (line 351) | def from_env(
function from_env (line 357) | def from_env(
function from_env (line 363) | def from_env(
function from_env (line 368) | def from_env(
function secret_from_env (line 425) | def secret_from_env(key: str | Sequence[str], /) -> Callable[[], SecretS...
function secret_from_env (line 429) | def secret_from_env(key: str, /, *, default: str) -> Callable[[], Secret...
function secret_from_env (line 433) | def secret_from_env(
function secret_from_env (line 439) | def secret_from_env(key: str, /, *, error_message: str) -> Callable[[], ...
function secret_from_env (line 442) | def secret_from_env(
function ensure_id (line 509) | def ensure_id(id_val: str | None) -> str:
FILE: libs/core/langchain_core/utils/uuid.py
function _to_timestamp_and_nanos (line 20) | def _to_timestamp_and_nanos(nanoseconds: int) -> tuple[int, int]:
function uuid7 (line 26) | def uuid7(nanoseconds: int | None = None) -> UUID:
FILE: libs/core/langchain_core/vectorstores/__init__.py
function __getattr__ (line 26) | def __getattr__(attr_name: str) -> object:
function __dir__ (line 47) | def __dir__() -> list[str]:
FILE: libs/core/langchain_core/vectorstores/base.py
class VectorStore (line 43) | class VectorStore(ABC):
method add_texts (line 46) | def add_texts(
method embeddings (line 100) | def embeddings(self) -> Embeddings | None:
method delete (line 108) | def delete(self, ids: list[str] | None = None, **kwargs: Any) -> bool ...
method get_by_ids (line 122) | def get_by_ids(self, ids: Sequence[str], /) -> list[Document]:
method aget_by_ids (line 148) | async def aget_by_ids(self, ids: Sequence[str], /) -> list[Document]:
method adelete (line 172) | async def adelete(self, ids: list[str] | None = None, **kwargs: Any) -...
method aadd_texts (line 185) | async def aadd_texts(
method add_documents (line 234) | def add_documents(self, documents: list[Document], **kwargs: Any) -> l...
method aadd_documents (line 265) | async def aadd_documents(
method search (line 293) | def search(self, query: str, search_type: str, **kwargs: Any) -> list[...
method asearch (line 326) | async def asearch(
method similarity_search (line 361) | def similarity_search(
method _euclidean_relevance_score_fn (line 376) | def _euclidean_relevance_score_fn(distance: float) -> float:
method _cosine_relevance_score_fn (line 391) | def _cosine_relevance_score_fn(distance: float) -> float:
method _max_inner_product_relevance_score_fn (line 396) | def _max_inner_product_relevance_score_fn(distance: float) -> float:
method _select_relevance_score_fn (line 403) | def _select_relevance_score_fn(self) -> Callable[[float], float]:
method similarity_search_with_score (line 417) | def similarity_search_with_score(
method asimilarity_search_with_score (line 431) | async def asimilarity_search_with_score(
method _similarity_search_with_relevance_scores (line 450) | def _similarity_search_with_relevance_scores(
method _asimilarity_search_with_relevance_scores (line 478) | async def _asimilarity_search_with_relevance_scores(
method similarity_search_with_relevance_scores (line 506) | def similarity_search_with_relevance_scores(
method asimilarity_search_with_relevance_scores (line 556) | async def asimilarity_search_with_relevance_scores(
method asimilarity_search (line 606) | async def asimilarity_search(
method similarity_search_by_vector (line 624) | def similarity_search_by_vector(
method asimilarity_search_by_vector (line 639) | async def asimilarity_search_by_vector(
method max_marginal_relevance_search (line 659) | def max_marginal_relevance_search(
method amax_marginal_relevance_search (line 686) | async def amax_marginal_relevance_search(
method max_marginal_relevance_search_by_vector (line 724) | def max_marginal_relevance_search_by_vector(
method amax_marginal_relevance_search_by_vector (line 751) | async def amax_marginal_relevance_search_by_vector(
method from_documents (line 787) | def from_documents(
method afrom_documents (line 817) | async def afrom_documents(
method from_texts (line 848) | def from_texts(
method afrom_texts (line 871) | async def afrom_texts(
method _get_retriever_tags (line 898) | def _get_retriever_tags(self) -> list[str]:
method as_retriever (line 905) | def as_retriever(self, **kwargs: Any) -> VectorStoreRetriever:
class VectorStoreRetriever (line 964) | class VectorStoreRetriever(BaseRetriever):
method validate_search_type (line 988) | def validate_search_type(cls, values: dict) -> Any:
method _get_ls_params (line 1018) | def _get_ls_params(self, **kwargs: Any) -> LangSmithRetrieverParams:
method _get_relevant_documents (line 1040) | def _get_relevant_documents(
method _aget_relevant_documents (line 1061) | async def _aget_relevant_documents(
method add_documents (line 1087) | def add_documents(self, documents: list[Document], **kwargs: Any) -> l...
method aadd_documents (line 1099) | async def aadd_documents(
FILE: libs/core/langchain_core/vectorstores/in_memory.py
class InMemoryVectorStore (line 34) | class InMemoryVectorStore(VectorStore):
method __init__ (line 161) | def __init__(self, embedding: Embeddings) -> None:
method embeddings (line 174) | def embeddings(self) -> Embeddings:
method delete (line 178) | def delete(self, ids: Sequence[str] | None = None, **kwargs: Any) -> N...
method adelete (line 184) | async def adelete(self, ids: Sequence[str] | None = None, **kwargs: An...
method add_documents (line 188) | def add_documents(
method aadd_documents (line 224) | async def aadd_documents(
method get_by_ids (line 256) | def get_by_ids(self, ids: Sequence[str], /) -> list[Document]:
method aget_by_ids (line 280) | async def aget_by_ids(self, ids: Sequence[str], /) -> list[Document]:
method _similarity_search_with_score_by_vector (line 291) | def _similarity_search_with_score_by_vector(
method similarity_search_with_score_by_vector (line 334) | def similarity_search_with_score_by_vector(
method similarity_search_with_score (line 359) | def similarity_search_with_score(
method asimilarity_search_with_score (line 373) | async def asimilarity_search_with_score(
method similarity_search_by_vector (line 384) | def similarity_search_by_vector(
method asimilarity_search_by_vector (line 398) | async def asimilarity_search_by_vector(
method similarity_search (line 404) | def similarity_search(
method asimilarity_search (line 410) | async def asimilarity_search(
method max_marginal_relevance_search_by_vector (line 419) | def max_marginal_relevance_search_by_vector(
method max_marginal_relevance_search (line 451) | def max_marginal_relevance_search(
method amax_marginal_relevance_search (line 469) | async def amax_marginal_relevance_search(
method from_texts (line 488) | def from_texts(
method afrom_texts (line 503) | async def afrom_texts(
method load (line 517) | def load(
method dump (line 537) | def dump(self, path: str) -> None:
FILE: libs/core/langchain_core/vectorstores/utils.py
function _cosine_similarity (line 35) | def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
function maximal_marginal_relevance (line 106) | def maximal_marginal_relevance(
FILE: libs/core/scripts/check_version.py
function get_pyproject_version (line 13) | def get_pyproject_version(pyproject_path: Path) -> str | None:
function get_version_py_version (line 20) | def get_version_py_version(version_path: Path) -> str | None:
function main (line 27) | def main() -> int:
FILE: libs/core/tests/benchmarks/test_async_callbacks.py
class MyCustomAsyncHandler (line 16) | class MyCustomAsyncHandler(AsyncCallbackHandler):
method on_chat_model_start (line 18) | async def on_chat_model_start(
method on_llm_new_token (line 34) | async def on_llm_new_token(
function test_async_callbacks_in_sync (line 48) | async def test_async_callbacks_in_sync(benchmark: BenchmarkFixture) -> N...
FILE: libs/core/tests/benchmarks/test_imports.py
function test_import_time (line 51) | def test_import_time(benchmark: BenchmarkFixture, import_path: str) -> N...
FILE: libs/core/tests/integration_tests/test_compile.py
function test_placeholder (line 5) | def test_placeholder() -> None:
FILE: libs/core/tests/unit_tests/_api/test_beta_decorator.py
function test_warn_beta (line 48) | def test_warn_beta(kwargs: dict[str, Any], expected_message: str) -> None:
function beta_function (line 61) | def beta_function() -> str:
function beta_async_function (line 67) | async def beta_async_function() -> str:
class ClassWithBetaMethods (line 72) | class ClassWithBetaMethods:
method __init__ (line 73) | def __init__(self) -> None:
method beta_method (line 77) | def beta_method(self) -> str:
method beta_async_method (line 82) | async def beta_async_method(self) -> str:
method beta_classmethod (line 88) | def beta_classmethod(cls) -> str:
method beta_staticmethod (line 94) | def beta_staticmethod() -> str:
method beta_property (line 99) | def beta_property(self) -> str:
method beta_property (line 104) | def beta_property(self, _value: str) -> None:
method beta_property (line 109) | def beta_property(self) -> None:
function test_beta_function (line 113) | def test_beta_function() -> None:
function test_beta_async_function (line 133) | async def test_beta_async_function() -> None:
function test_beta_method (line 152) | def test_beta_method() -> None:
function test_beta_async_method (line 173) | async def test_beta_async_method() -> None:
function test_beta_classmethod (line 193) | def test_beta_classmethod() -> None:
function test_beta_staticmethod (line 210) | def test_beta_staticmethod() -> None:
function test_beta_property (line 229) | def test_beta_property() -> None:
function test_whole_class_beta (line 252) | def test_whole_class_beta() -> None:
function test_whole_class_inherited_beta (line 287) | def test_whole_class_inherited_beta() -> None:
class MyModel (line 359) | class MyModel(BaseModel):
method beta_method (line 361) | def beta_method(self) -> str:
function test_beta_method_pydantic (line 366) | def test_beta_method_pydantic() -> None:
FILE: libs/core/tests/unit_tests/_api/test_deprecation.py
function test_warn_deprecated (line 62) | def test_warn_deprecated(kwargs: dict[str, Any], expected_message: str) ...
function test_undefined_deprecation_schedule (line 74) | def test_undefined_deprecation_schedule() -> None:
function deprecated_function (line 81) | def deprecated_function() -> str:
function deprecated_async_function (line 87) | async def deprecated_async_function() -> str:
class ClassWithDeprecatedMethods (line 92) | class ClassWithDeprecatedMethods:
method __init__ (line 93) | def __init__(self) -> None:
method deprecated_method (line 97) | def deprecated_method(self) -> str:
method deprecated_async_method (line 102) | async def deprecated_async_method(self) -> str:
method deprecated_classmethod (line 108) | def deprecated_classmethod(cls) -> str:
method deprecated_staticmethod (line 114) | def deprecated_staticmethod() -> str:
method deprecated_property (line 120) | def deprecated_property(self) -> str:
function test_deprecated_function (line 125) | def test_deprecated_function() -> None:
function test_deprecated_async_function (line 144) | async def test_deprecated_async_function() -> None:
function test_deprecated_method (line 165) | def test_deprecated_method() -> None:
function test_deprecated_async_method (line 185) | async def test_deprecated_async_method() -> None:
function test_deprecated_classmethod (line 207) | def test_deprecated_classmethod() -> None:
function test_deprecated_staticmethod (line 224) | def test_deprecated_staticmethod() -> None:
function test_deprecated_property (line 244) | def test_deprecated_property() -> None:
function test_whole_class_deprecation (line 264) | def test_whole_class_deprecation() -> None:
function test_whole_class_inherited_deprecation (line 302) | def test_whole_class_inherited_deprecation() -> None:
class MyModel (line 383) | class MyModel(BaseModel):
method deprecated_method (line 385) | def deprecated_method(self) -> str:
function test_deprecated_method_pydantic (line 390) | def test_deprecated_method_pydantic() -> None:
function test_raise_error_for_bad_decorator (line 408) | def test_raise_error_for_bad_decorator() -> None:
function test_rename_parameter (line 421) | def test_rename_parameter() -> None:
function test_rename_parameter_for_async_func (line 446) | async def test_rename_parameter_for_async_func() -> None:
function test_rename_parameter_method (line 470) | def test_rename_parameter_method() -> None:
function test_deprecated_function_has_pep702_attribute (line 505) | def test_deprecated_function_has_pep702_attribute() -> None:
function test_deprecated_function_with_alternative_import_has_pep702_attribute (line 517) | def test_deprecated_function_with_alternative_import_has_pep702_attribut...
function test_deprecated_function_without_alternative_has_pep702_attribute (line 531) | def test_deprecated_function_without_alternative_has_pep702_attribute() ...
function test_deprecated_class_has_pep702_attribute (line 543) | def test_deprecated_class_has_pep702_attribute() -> None:
function test_deprecated_class_without_alternative_has_pep702_attribute (line 555) | def test_deprecated_class_without_alternative_has_pep702_attribute() -> ...
function test_deprecated_property_has_pep702_attribute (line 567) | def test_deprecated_property_has_pep702_attribute() -> None:
FILE: libs/core/tests/unit_tests/_api/test_imports.py
function test_all_imports (line 18) | def test_all_imports() -> None:
FILE: libs/core/tests/unit_tests/_api/test_path.py
function test_as_import_path (line 10) | def test_as_import_path() -> None:
FILE: libs/core/tests/unit_tests/caches/test_in_memory_cache.py
function cache (line 8) | def cache() -> InMemoryCache:
function cache_item (line 13) | def cache_item(item_id: int) -> tuple[str, str, RETURN_VAL_TYPE]:
function test_initialization (line 21) | def test_initialization() -> None:
function test_lookup (line 35) | def test_lookup(
function test_update_with_no_maxsize (line 45) | def test_update_with_no_maxsize(cache: InMemoryCache) -> None:
function test_update_with_maxsize (line 52) | def test_update_with_maxsize() -> None:
function test_clear (line 72) | def test_clear(cache: InMemoryCache) -> None:
function test_alookup (line 80) | async def test_alookup(cache: InMemoryCache) -> None:
function test_aupdate_with_no_maxsize (line 88) | async def test_aupdate_with_no_maxsize(cache: InMemoryCache) -> None:
function test_aupdate_with_maxsize (line 95) | async def test_aupdate_with_maxsize() -> None:
function test_aclear (line 114) | async def test_aclear(cache: InMemoryCache) -> None:
FILE: libs/core/tests/unit_tests/callbacks/test_async_callback_manager.py
function test_inline_handlers_share_parent_context (line 22) | async def test_inline_handlers_share_parent_context() -> None:
function test_inline_handlers_share_parent_context_multiple (line 78) | async def test_inline_handlers_share_parent_context_multiple() -> None:
function test_shielded_callback_context_preservation (line 154) | async def test_shielded_callback_context_preservation() -> None:
FILE: libs/core/tests/unit_tests/callbacks/test_dispatch_custom_event.py
class AsyncCustomCallbackHandler (line 17) | class AsyncCustomCallbackHandler(AsyncCallbackHandler):
method __init__ (line 18) | def __init__(self) -> None:
method on_custom_event (line 21) | async def on_custom_event(
function test_custom_event_root_dispatch (line 43) | def test_custom_event_root_dispatch() -> None:
function test_async_custom_event_root_dispatch (line 53) | async def test_async_custom_event_root_dispatch() -> None:
function test_async_custom_event_implicit_config (line 67) | async def test_async_custom_event_implicit_config() -> None:
function test_async_callback_manager (line 91) | async def test_async_callback_manager() -> None:
function test_sync_callback_manager (line 114) | def test_sync_callback_manager() -> None:
FILE: libs/core/tests/unit_tests/callbacks/test_handle_event.py
class _FallbackChatHandler (line 23) | class _FallbackChatHandler(BaseCallbackHandler):
method on_chat_model_start (line 29) | def on_chat_model_start(
method on_llm_start (line 37) | def on_llm_start(self, *args: Any, **kwargs: Any) -> None:
class _FallbackChatHandlerAsync (line 41) | class _FallbackChatHandlerAsync(BaseCallbackHandler):
method on_chat_model_start (line 46) | def on_chat_model_start(
method on_llm_start (line 54) | def on_llm_start(self, *args: Any, **kwargs: Any) -> None:
function test_handle_event_chat_model_start_fallback_to_llm_start (line 58) | def test_handle_event_chat_model_start_fallback_to_llm_start() -> None:
function test_handle_event_other_event_not_implemented_logs_warning (line 77) | def test_handle_event_other_event_not_implemented_logs_warning() -> None:
function test_ahandle_event_chat_model_start_fallback_to_llm_start (line 97) | async def test_ahandle_event_chat_model_start_fallback_to_llm_start() ->...
function test_ahandle_event_other_event_not_implemented_logs_warning (line 117) | async def test_ahandle_event_other_event_not_implemented_logs_warning() ...
FILE: libs/core/tests/unit_tests/callbacks/test_imports.py
function test_all_imports (line 41) | def test_all_imports() -> None:
FILE: libs/core/tests/unit_tests/callbacks/test_sync_callback_manager.py
function test_remove_handler (line 4) | def test_remove_handler() -> None:
function test_merge_preserves_handler_distinction (line 18) | def test_merge_preserves_handler_distinction() -> None:
FILE: libs/core/tests/unit_tests/callbacks/test_usage_callback.py
class FakeChatModelWithResponseMetadata (line 49) | class FakeChatModelWithResponseMetadata(GenericFakeChatModel):
method _generate (line 52) | def _generate(self, *args: Any, **kwargs: Any) -> ChatResult:
function test_usage_callback (line 60) | def test_usage_callback() -> None:
function test_usage_callback_async (line 100) | async def test_usage_callback_async() -> None:
FILE: libs/core/tests/unit_tests/chat_history/test_chat_history.py
function test_add_message_implementation_only (line 7) | def test_add_message_implementation_only() -> None:
function test_bulk_message_implementation_only (line 42) | def test_bulk_message_implementation_only() -> None:
function test_async_interface (line 77) | async def test_async_interface() -> None:
FILE: libs/core/tests/unit_tests/conftest.py
function blockbuster (line 13) | def blockbuster() -> Iterator[BlockBuster]:
function pytest_addoption (line 35) | def pytest_addoption(parser: pytest.Parser) -> None:
function pytest_collection_modifyitems (line 49) | def pytest_collection_modifyitems(
function deterministic_uuids (line 116) | def deterministic_uuids(mocker: MockerFixture) -> MockerFixture:
FILE: libs/core/tests/unit_tests/document_loaders/test_base.py
function test_base_blob_parser (line 13) | def test_base_blob_parser() -> None:
function test_default_lazy_load (line 36) | def test_default_lazy_load() -> None:
function test_lazy_load_not_implemented (line 50) | def test_lazy_load_not_implemented() -> None:
function test_default_aload (line 59) | async def test_default_aload() -> None:
FILE: libs/core/tests/unit_tests/document_loaders/test_langsmith.py
function test_init (line 12) | def test_init() -> None:
function test_lazy_load (line 42) | def test_lazy_load() -> None:
FILE: libs/core/tests/unit_tests/documents/test_document.py
function test_init (line 4) | def test_init() -> None:
FILE: libs/core/tests/unit_tests/documents/test_imports.py
function test_all_imports (line 6) | def test_all_imports() -> None:
FILE: libs/core/tests/unit_tests/documents/test_str.py
function test_str (line 4) |
Copy disabled (too large)
Download .json
Condensed preview — 2745 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (13,650K chars).
[
{
"path": ".devcontainer/README.md",
"chars": 3684,
"preview": "# Dev container\n\nThis project includes a [dev container](https://containers.dev/), which lets you use a container as a f"
},
{
"path": ".devcontainer/devcontainer.json",
"chars": 2216,
"preview": "// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n// README at: https://github.co"
},
{
"path": ".devcontainer/docker-compose.yaml",
"chars": 205,
"preview": "version: '3'\nservices:\n langchain:\n build:\n dockerfile: libs/langchain/dev.Dockerfile\n context: ..\n\n ne"
},
{
"path": ".dockerignore",
"chars": 258,
"preview": "# Git\n.git\n.github\n\n# Python\n__pycache__\n*.pyc\n*.pyo\n.venv\n.mypy_cache\n.pytest_cache\n.ruff_cache\n*.egg-info\n.tox\n\n# IDE\n"
},
{
"path": ".editorconfig",
"chars": 806,
"preview": "# top-most EditorConfig file\nroot = true\n\n# All files\n[*]\ncharset = utf-8\nend_of_line = lf\ninsert_final_newline = true\nt"
},
{
"path": ".gitattributes",
"chars": 88,
"preview": "* text=auto eol=lf\n*.{cmd,[cC][mM][dD]} text eol=crlf\n*.{bat,[bB][aA][tT]} text eol=crlf"
},
{
"path": ".github/CODEOWNERS",
"chars": 90,
"preview": "/.github/ @ccurme @eyurtsev @mdrxy\n/libs/core/ @eyurtsev\n/libs/partners/ @ccurme @mdrxy\n"
},
{
"path": ".github/ISSUE_TEMPLATE/bug-report.yml",
"chars": 6076,
"preview": "name: \"\\U0001F41B Bug Report\"\ndescription: Report a bug in LangChain. To report a security issue, please instead use the"
},
{
"path": ".github/ISSUE_TEMPLATE/config.yml",
"chars": 668,
"preview": "blank_issues_enabled: false\nversion: 2.1\ncontact_links:\n - name: 💬 LangChain Forum\n url: https://forum.langchain.co"
},
{
"path": ".github/ISSUE_TEMPLATE/feature-request.yml",
"chars": 5677,
"preview": "name: \"✨ Feature Request\"\ndescription: Request a new feature or enhancement for LangChain. For questions, please use the"
},
{
"path": ".github/ISSUE_TEMPLATE/privileged.yml",
"chars": 1850,
"preview": "name: 🔒 Privileged\ndescription: You are a LangChain maintainer, or was asked directly by a maintainer to create an issue"
},
{
"path": ".github/ISSUE_TEMPLATE/task.yml",
"chars": 3828,
"preview": "name: \"📋 Task\"\ndescription: Create a task for project management and tracking by LangChain maintainers. If you are not a"
},
{
"path": ".github/PULL_REQUEST_TEMPLATE.md",
"chars": 2228,
"preview": "Fixes #\n\n<!-- Replace everything above this line with a 1-2 sentence description of your change. Keep the \"Fixes #xx\" ke"
},
{
"path": ".github/actions/uv_setup/action.yml",
"chars": 1152,
"preview": "# Helper to set up Python and uv with caching\n\nname: uv-install\ndescription: Set up Python and uv with caching\n\ninputs:\n"
},
{
"path": ".github/dependabot.yml",
"chars": 2244,
"preview": "# Please see the documentation for all configuration options:\n# https://docs.github.com/github/administering-a-repositor"
},
{
"path": ".github/scripts/check_diff.py",
"chars": 12611,
"preview": "\"\"\"Analyze git diffs to determine which directories need to be tested.\n\nIntelligently determines which LangChain package"
},
{
"path": ".github/scripts/check_prerelease_dependencies.py",
"chars": 1279,
"preview": "\"\"\"Check that no dependencies allow prereleases unless we're releasing a prerelease.\"\"\"\n\nimport sys\n\nimport tomllib\n\nif "
},
{
"path": ".github/scripts/get_min_versions.py",
"chars": 6691,
"preview": "\"\"\"Get minimum versions of dependencies from a pyproject.toml file.\"\"\"\n\nimport sys\nfrom collections import defaultdict\n\n"
},
{
"path": ".github/scripts/pr-labeler-config.json",
"chars": 3744,
"preview": "{\n \"trustedThreshold\": 5,\n \"labelColor\": \"b76e79\",\n \"sizeThresholds\": [\n { \"label\": \"size: XS\", \"max\": 50 },\n {"
},
{
"path": ".github/scripts/pr-labeler.js",
"chars": 8957,
"preview": "// Shared helpers for pr_labeler.yml and tag-external-issues.yml.\n//\n// Usage from actions/github-script (requires actio"
},
{
"path": ".github/tools/git-restore-mtime",
"chars": 25990,
"preview": "#!/usr/bin/env python3\n#\n# git-restore-mtime - Change mtime of files based on commit date of last change\n#\n# Copyrigh"
},
{
"path": ".github/workflows/_compile_integration_test.yml",
"chars": 1825,
"preview": "# Validates that a package's integration tests compile without syntax or import errors.\n#\n# (If an integration test fail"
},
{
"path": ".github/workflows/_lint.yml",
"chars": 2513,
"preview": "# Runs linting.\n#\n# Uses the package's Makefile to run the checks, specifically the\n# `lint_package` and `lint_tests` ta"
},
{
"path": ".github/workflows/_refresh_model_profiles.yml",
"chars": 7380,
"preview": "# Reusable workflow: refreshes model profile data for any repo that uses the\n# `langchain-profiles` CLI. Creates (or upd"
},
{
"path": ".github/workflows/_release.yml",
"chars": 25214,
"preview": "# Builds and publishes LangChain packages to PyPI.\n#\n# Manually triggered, though can be used as a reusable workflow (wo"
},
{
"path": ".github/workflows/_test.yml",
"chars": 2719,
"preview": "# Runs unit tests with both current and minimum supported dependency versions\n# to ensure compatibility across the suppo"
},
{
"path": ".github/workflows/_test_pydantic.yml",
"chars": 1987,
"preview": "# Facilitate unit testing against different Pydantic versions for a provided package.\n\nname: \"🐍 Pydantic Version Testing"
},
{
"path": ".github/workflows/auto-label-by-package.yml",
"chars": 4041,
"preview": "name: Auto Label Issues by Package\n\non:\n issues:\n types: [opened, edited]\n\npermissions:\n contents: read\n\njobs:\n la"
},
{
"path": ".github/workflows/check_agents_sync.yml",
"chars": 1133,
"preview": "# Ensures CLAUDE.md and AGENTS.md stay synchronized.\n#\n# These files contain the same development guidelines but are nam"
},
{
"path": ".github/workflows/check_core_versions.yml",
"chars": 2835,
"preview": "# Ensures version numbers in pyproject.toml and version.py stay in sync.\n#\n# (Prevents releases with mismatched version "
},
{
"path": ".github/workflows/check_diffs.yml",
"chars": 6740,
"preview": "# Primary CI workflow.\n#\n# Only runs against packages that have changed files.\n#\n# Runs:\n# - Linting (_lint.yml)\n# - Uni"
},
{
"path": ".github/workflows/close_unchecked_issues.yml",
"chars": 3656,
"preview": "# Auto-close issues that bypass or ignore the issue template checkboxes.\n#\n# GitHub issue forms enforce `required: true`"
},
{
"path": ".github/workflows/codspeed.yml",
"chars": 2947,
"preview": "# CodSpeed performance benchmarks.\n#\n# Runs benchmarks on changed packages and uploads results to CodSpeed.\n# Separated "
},
{
"path": ".github/workflows/integration_tests.yml",
"chars": 12297,
"preview": "# Routine integration tests against partner libraries with live API credentials.\n#\n# Uses `make integration_tests` withi"
},
{
"path": ".github/workflows/pr_labeler.yml",
"chars": 8698,
"preview": "# Unified PR labeler — applies size, file-based, title-based, and\n# contributor classification labels in a single sequen"
},
{
"path": ".github/workflows/pr_labeler_backfill.yml",
"chars": 4796,
"preview": "# Backfill PR labels on all open PRs.\n#\n# Manual-only workflow that applies the same labels as pr_labeler.yml\n# (size, f"
},
{
"path": ".github/workflows/pr_lint.yml",
"chars": 4068,
"preview": "# PR title linting.\n#\n# FORMAT (Conventional Commits 1.0.0):\n#\n# <type>[optional scope]: <description>\n# [optional b"
},
{
"path": ".github/workflows/refresh_model_profiles.yml",
"chars": 2113,
"preview": "# Refreshes model profile data for all in-monorepo partner integrations by\n# pulling the latest metadata from models.dev"
},
{
"path": ".github/workflows/reopen_on_assignment.yml",
"chars": 6294,
"preview": "# Reopen PRs that were auto-closed by require_issue_link.yml when the\n# contributor was not assigned to the linked issue"
},
{
"path": ".github/workflows/require_issue_link.yml",
"chars": 21414,
"preview": "# Require external PRs to reference an approved issue (e.g. Fixes #NNN) and\n# the PR author to be assigned to that issue"
},
{
"path": ".github/workflows/tag-external-issues.yml",
"chars": 7658,
"preview": "# Automatically tag issues as \"external\" or \"internal\" based on whether\n# the author is a member of the langchain-ai Git"
},
{
"path": ".github/workflows/v03_api_doc_build.yml",
"chars": 6472,
"preview": "# Build the API reference documentation for v0.3 branch.\n#\n# Manual trigger only.\n#\n# Built HTML pushed to langchain-ai/"
},
{
"path": ".gitignore",
"chars": 2339,
"preview": ".vs/\n.claude/\n.idea/\n#Emacs backup\n*~\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C ext"
},
{
"path": ".markdownlint.json",
"chars": 195,
"preview": "{\n \"MD013\": false,\n \"MD024\": {\n \"siblings_only\": true\n },\n \"MD025\": false,\n \"MD033\": false,\n \"MD034\": false,\n "
},
{
"path": ".mcp.json",
"chars": 233,
"preview": "{\n \"mcpServers\": {\n \"docs-langchain\": {\n \"type\": \"http\",\n \"url\": \"https://docs.langchain.com/mcp\"\n },\n "
},
{
"path": ".pre-commit-config.yaml",
"chars": 4600,
"preview": "repos:\n - repo: https://github.com/pre-commit/pre-commit-hooks\n rev: v4.3.0\n hooks:\n - id: no-commit-to-bran"
},
{
"path": ".vscode/extensions.json",
"chars": 521,
"preview": "{\n \"recommendations\": [\n \"ms-python.python\",\n \"charliermarsh.ruff\",\n \"ms-python.mypy-type-checker\",\n \"ms-to"
},
{
"path": ".vscode/settings.json",
"chars": 1950,
"preview": "{\n \"python.analysis.include\": [\n \"libs/**\",\n ],\n \"python.analysis.exclude\": [\n \"**/node_modules\",\n \"**/__pyc"
},
{
"path": "AGENTS.md",
"chars": 10594,
"preview": "# Global development guidelines for the LangChain monorepo\n\nThis document provides context to understand the LangChain P"
},
{
"path": "CITATION.cff",
"chars": 234,
"preview": "cff-version: 1.2.0\nmessage: \"If you use this software, please cite it as below.\"\nauthors:\n- family-names: \"Chase\"\n give"
},
{
"path": "CLAUDE.md",
"chars": 10594,
"preview": "# Global development guidelines for the LangChain monorepo\n\nThis document provides context to understand the LangChain P"
},
{
"path": "LICENSE",
"chars": 1067,
"preview": "MIT License\n\nCopyright (c) LangChain, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy"
},
{
"path": "README.md",
"chars": 5760,
"preview": "<div align=\"center\">\n <a href=\"https://docs.langchain.com/oss/python/langchain/overview\">\n <picture>\n <source m"
},
{
"path": "libs/Makefile",
"chars": 519,
"preview": "# Makefile for libs/ directory\n# Contains targets that operate across multiple packages\n\nLANGCHAIN_DIRS = core text-spli"
},
{
"path": "libs/README.md",
"chars": 2072,
"preview": "# LangChain Monorepo\n\n> [!IMPORTANT]\n> Refer to the [LangChain contributing guide](https://docs.langchain.com/oss/python"
},
{
"path": "libs/core/Makefile",
"chars": 2961,
"preview": ".PHONY: all format lint type test tests test_watch integration_tests help extended_tests check_version\n\n# Default target"
},
{
"path": "libs/core/README.md",
"chars": 2706,
"preview": "# 🦜🍎️ LangChain Core\n\n[](https://pypi.org/proje"
},
{
"path": "libs/core/extended_testing_deps.txt",
"chars": 13,
"preview": "jinja2>=3,<4\n"
},
{
"path": "libs/core/langchain_core/__init__.py",
"chars": 679,
"preview": "\"\"\"`langchain-core` defines the base abstractions for the LangChain ecosystem.\n\nThe interfaces for core components like "
},
{
"path": "libs/core/langchain_core/_api/__init__.py",
"chars": 2600,
"preview": "\"\"\"Helper functions for managing the LangChain API.\n\nThis module is only relevant for LangChain developers, not for user"
},
{
"path": "libs/core/langchain_core/_api/beta_decorator.py",
"chars": 8570,
"preview": "\"\"\"Helper functions for marking parts of the LangChain API as beta.\n\nThis module was loosely adapted from matplotlib's ["
},
{
"path": "libs/core/langchain_core/_api/deprecation.py",
"chars": 21550,
"preview": "\"\"\"Helper functions for deprecating parts of the LangChain API.\n\nThis module was adapted from matplotlib's [`_api/deprec"
},
{
"path": "libs/core/langchain_core/_api/internal.py",
"chars": 720,
"preview": "import inspect\nfrom typing import cast\n\n\ndef is_caller_internal(depth: int = 2) -> bool:\n \"\"\"Return whether the calle"
},
{
"path": "libs/core/langchain_core/_api/path.py",
"chars": 1349,
"preview": "import os\nfrom pathlib import Path\n\nHERE = Path(__file__).parent\n\n# Get directory of langchain package\nPACKAGE_DIR = HER"
},
{
"path": "libs/core/langchain_core/_import_utils.py",
"chars": 1428,
"preview": "from importlib import import_module\n\n\ndef import_attr(\n attr_name: str,\n module_name: str | None,\n package: str"
},
{
"path": "libs/core/langchain_core/_security/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "libs/core/langchain_core/_security/_ssrf_protection.py",
"chars": 12507,
"preview": "\"\"\"SSRF Protection for validating URLs against Server-Side Request Forgery attacks.\n\nThis module provides utilities to v"
},
{
"path": "libs/core/langchain_core/agents.py",
"chars": 8426,
"preview": "\"\"\"Schema definitions for representing agent actions, observations, and return values.\n\n!!! warning\n\n The schema defi"
},
{
"path": "libs/core/langchain_core/caches.py",
"chars": 10403,
"preview": "\"\"\"Optional caching layer for language models.\n\nDistinct from provider-based [prompt caching](https://docs.langchain.com"
},
{
"path": "libs/core/langchain_core/callbacks/__init__.py",
"chars": 4221,
"preview": "\"\"\"Callback handlers allow listening to events in LangChain.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain_core._"
},
{
"path": "libs/core/langchain_core/callbacks/base.py",
"chars": 34575,
"preview": "\"\"\"Base callback handler for LangChain.\"\"\"\n\nfrom __future__ import annotations\n\nimport logging\nfrom typing import TYPE_C"
},
{
"path": "libs/core/langchain_core/callbacks/file.py",
"chars": 8340,
"preview": "\"\"\"Callback handler that writes to a file.\"\"\"\n\nfrom __future__ import annotations\n\nfrom pathlib import Path\nfrom typing "
},
{
"path": "libs/core/langchain_core/callbacks/manager.py",
"chars": 85680,
"preview": "\"\"\"Run managers.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport atexit\nimport functools\nimport logging\nfr"
},
{
"path": "libs/core/langchain_core/callbacks/stdout.py",
"chars": 3695,
"preview": "\"\"\"Callback handler that prints to std out.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, An"
},
{
"path": "libs/core/langchain_core/callbacks/streaming_stdout.py",
"chars": 4334,
"preview": "\"\"\"Callback Handler streams to stdout on new llm token.\"\"\"\n\nfrom __future__ import annotations\n\nimport sys\nfrom typing i"
},
{
"path": "libs/core/langchain_core/callbacks/usage.py",
"chars": 5128,
"preview": "\"\"\"Callback Handler that tracks `AIMessage.usage_metadata`.\"\"\"\n\nimport threading\nfrom collections.abc import Generator\nf"
},
{
"path": "libs/core/langchain_core/chat_history.py",
"chars": 8456,
"preview": "\"\"\"Chat message history stores a history of the message interactions in a chat.\"\"\"\n\nfrom __future__ import annotations\n\n"
},
{
"path": "libs/core/langchain_core/chat_loaders.py",
"chars": 601,
"preview": "\"\"\"Chat loaders.\"\"\"\n\nfrom abc import ABC, abstractmethod\nfrom collections.abc import Iterator\n\nfrom langchain_core.chat_"
},
{
"path": "libs/core/langchain_core/chat_sessions.py",
"chars": 565,
"preview": "\"\"\"**Chat Sessions** are a collection of messages and function calls.\"\"\"\n\nfrom collections.abc import Sequence\nfrom typi"
},
{
"path": "libs/core/langchain_core/cross_encoders.py",
"chars": 394,
"preview": "\"\"\"Cross Encoder interface.\"\"\"\n\nfrom abc import ABC, abstractmethod\n\n\nclass BaseCrossEncoder(ABC):\n \"\"\"Interface for "
},
{
"path": "libs/core/langchain_core/document_loaders/__init__.py",
"chars": 975,
"preview": "\"\"\"Document loaders.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain_core._import_utils import import_attr\n\nif TYPE"
},
{
"path": "libs/core/langchain_core/document_loaders/base.py",
"chars": 4773,
"preview": "\"\"\"Abstract interface for document loader implementations.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, "
},
{
"path": "libs/core/langchain_core/document_loaders/blob_loaders.py",
"chars": 1070,
"preview": "\"\"\"Schema for Blobs and Blob Loaders.\n\nThe goal is to facilitate decoupling of content loading from content parsing code"
},
{
"path": "libs/core/langchain_core/document_loaders/langsmith.py",
"chars": 5374,
"preview": "\"\"\"LangSmith document loader.\"\"\"\n\nimport datetime\nimport json\nimport uuid\nfrom collections.abc import Callable, Iterator"
},
{
"path": "libs/core/langchain_core/documents/__init__.py",
"chars": 2013,
"preview": "\"\"\"Documents module for data retrieval and processing workflows.\n\nThis module provides core abstractions for handling da"
},
{
"path": "libs/core/langchain_core/documents/base.py",
"chars": 11124,
"preview": "\"\"\"Base classes for media and documents.\n\nThis module contains core abstractions for **data retrieval and processing wor"
},
{
"path": "libs/core/langchain_core/documents/compressor.py",
"chars": 2017,
"preview": "\"\"\"Document compressor.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, abstractmethod\nfrom typing import T"
},
{
"path": "libs/core/langchain_core/documents/transformers.py",
"chars": 2543,
"preview": "\"\"\"Document transformers.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, abstractmethod\nfrom typing import"
},
{
"path": "libs/core/langchain_core/embeddings/__init__.py",
"chars": 774,
"preview": "\"\"\"Embeddings.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain_core._import_utils import import_attr\n\nif TYPE_CHECK"
},
{
"path": "libs/core/langchain_core/embeddings/embeddings.py",
"chars": 2405,
"preview": "\"\"\"**Embeddings** interface.\"\"\"\n\nfrom abc import ABC, abstractmethod\n\nfrom langchain_core.runnables.config import run_in"
},
{
"path": "libs/core/langchain_core/embeddings/fake.py",
"chars": 3886,
"preview": "\"\"\"Module contains a few fake embedding models for testing purposes.\"\"\"\n\n# Please do not add additional fake embedding m"
},
{
"path": "libs/core/langchain_core/env.py",
"chars": 598,
"preview": "\"\"\"Utilities for getting information about the runtime environment.\"\"\"\n\nimport platform\nfrom functools import lru_cache\n"
},
{
"path": "libs/core/langchain_core/example_selectors/__init__.py",
"chars": 1394,
"preview": "\"\"\"Example selectors.\n\n**Example selector** implements logic for selecting examples to include them in prompts.\nThis all"
},
{
"path": "libs/core/langchain_core/example_selectors/base.py",
"chars": 1716,
"preview": "\"\"\"Interface for selecting examples to include in prompts.\"\"\"\n\nfrom abc import ABC, abstractmethod\nfrom typing import An"
},
{
"path": "libs/core/langchain_core/example_selectors/length_based.py",
"chars": 4379,
"preview": "\"\"\"Select examples based on length.\"\"\"\n\nimport re\nfrom collections.abc import Callable\n\nfrom pydantic import BaseModel, "
},
{
"path": "libs/core/langchain_core/example_selectors/semantic_similarity.py",
"chars": 13577,
"preview": "\"\"\"Example selector that selects examples based on SemanticSimilarity.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc "
},
{
"path": "libs/core/langchain_core/exceptions.py",
"chars": 3845,
"preview": "\"\"\"Custom **exceptions** for LangChain.\"\"\"\n\nfrom enum import Enum\nfrom typing import Any\n\n\nclass LangChainException(Exce"
},
{
"path": "libs/core/langchain_core/globals.py",
"chars": 1852,
"preview": "\"\"\"Global values and configuration that apply to all of LangChain.\"\"\"\n\nfrom typing import TYPE_CHECKING, Optional\n\nif TY"
},
{
"path": "libs/core/langchain_core/indexing/__init__.py",
"chars": 1276,
"preview": "\"\"\"Code to help indexing data into a vectorstore.\n\nThis package contains helper logic to help deal with indexing data in"
},
{
"path": "libs/core/langchain_core/indexing/api.py",
"chars": 38840,
"preview": "\"\"\"Module contains logic for indexing documents into vector stores.\"\"\"\n\nfrom __future__ import annotations\n\nimport hashl"
},
{
"path": "libs/core/langchain_core/indexing/base.py",
"chars": 22449,
"preview": "\"\"\"Base classes for indexing.\"\"\"\n\nfrom __future__ import annotations\n\nimport abc\nimport time\nfrom abc import ABC, abstra"
},
{
"path": "libs/core/langchain_core/indexing/in_memory.py",
"chars": 3283,
"preview": "\"\"\"In memory document index.\"\"\"\n\nimport operator\nimport uuid\nfrom collections.abc import Sequence\nfrom typing import Any"
},
{
"path": "libs/core/langchain_core/language_models/__init__.py",
"chars": 3646,
"preview": "\"\"\"Core language model abstractions.\n\nLangChain has two main classes to work with language models: chat models and\n\"old-"
},
{
"path": "libs/core/langchain_core/language_models/_utils.py",
"chars": 11046,
"preview": "import re\nfrom collections.abc import Sequence\nfrom typing import (\n TYPE_CHECKING,\n Literal,\n TypedDict,\n T"
},
{
"path": "libs/core/langchain_core/language_models/base.py",
"chars": 12956,
"preview": "\"\"\"Base language models class.\"\"\"\n\nfrom __future__ import annotations\n\nimport warnings\nfrom abc import ABC, abstractmeth"
},
{
"path": "libs/core/langchain_core/language_models/chat_models.py",
"chars": 75602,
"preview": "\"\"\"Chat models for conversational AI.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport contextlib\nimport in"
},
{
"path": "libs/core/langchain_core/language_models/fake.py",
"chars": 3732,
"preview": "\"\"\"Fake LLMs for testing purposes.\"\"\"\n\nimport asyncio\nimport time\nfrom collections.abc import AsyncIterator, Iterator, M"
},
{
"path": "libs/core/langchain_core/language_models/fake_chat_models.py",
"chars": 13619,
"preview": "\"\"\"Fake chat models for testing purposes.\"\"\"\n\nimport asyncio\nimport re\nimport time\nfrom collections.abc import AsyncIter"
},
{
"path": "libs/core/langchain_core/language_models/llms.py",
"chars": 54523,
"preview": "\"\"\"Base interface for traditional large language models (LLMs) to expose.\n\nThese are traditionally older models (newer m"
},
{
"path": "libs/core/langchain_core/language_models/model_profile.py",
"chars": 4898,
"preview": "\"\"\"Model profile types and utilities.\"\"\"\n\nimport logging\nimport warnings\nfrom typing import get_type_hints\n\nfrom pydanti"
},
{
"path": "libs/core/langchain_core/load/__init__.py",
"chars": 1238,
"preview": "\"\"\"**Load** module helps with serialization and deserialization.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain_co"
},
{
"path": "libs/core/langchain_core/load/_validation.py",
"chars": 5891,
"preview": "\"\"\"Validation utilities for LangChain serialization.\n\nProvides escape-based protection against injection attacks in seri"
},
{
"path": "libs/core/langchain_core/load/dump.py",
"chars": 3883,
"preview": "\"\"\"Serialize LangChain objects to JSON.\n\nProvides `dumps` (to JSON string) and `dumpd` (to dict) for serializing\n`Serial"
},
{
"path": "libs/core/langchain_core/load/load.py",
"chars": 28707,
"preview": "\"\"\"Load LangChain objects from JSON strings or objects.\n\n## How it works\n\nEach `Serializable` LangChain object has a uni"
},
{
"path": "libs/core/langchain_core/load/mapping.py",
"chars": 29690,
"preview": "\"\"\"Serialization mapping.\n\nThis file contains a mapping between the `lc_namespace` path for a given\nsubclass that implem"
},
{
"path": "libs/core/langchain_core/load/serializable.py",
"chars": 12791,
"preview": "\"\"\"Serializable base class.\"\"\"\n\nimport contextlib\nimport logging\nfrom abc import ABC\nfrom typing import (\n Any,\n L"
},
{
"path": "libs/core/langchain_core/messages/__init__.py",
"chars": 5723,
"preview": "\"\"\"**Messages** are objects used in prompts and chat conversations.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain"
},
{
"path": "libs/core/langchain_core/messages/ai.py",
"chars": 28222,
"preview": "\"\"\"AI message.\"\"\"\n\nimport itertools\nimport json\nimport logging\nimport operator\nfrom collections.abc import Sequence\nfrom"
},
{
"path": "libs/core/langchain_core/messages/base.py",
"chars": 17472,
"preview": "\"\"\"Base message.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Any, cast, overload\n\nfrom pyd"
},
{
"path": "libs/core/langchain_core/messages/block_translators/__init__.py",
"chars": 4249,
"preview": "\"\"\"Derivations of standard content blocks from provider content.\n\n`AIMessage` will first attempt to use a provider-speci"
},
{
"path": "libs/core/langchain_core/messages/block_translators/anthropic.py",
"chars": 20012,
"preview": "\"\"\"Derivations of standard content blocks from Anthropic content.\"\"\"\n\nimport json\nfrom collections.abc import Iterator\nf"
},
{
"path": "libs/core/langchain_core/messages/block_translators/bedrock.py",
"chars": 3735,
"preview": "\"\"\"Derivations of standard content blocks from Bedrock content.\"\"\"\n\nfrom langchain_core.messages import AIMessage, AIMes"
},
{
"path": "libs/core/langchain_core/messages/block_translators/bedrock_converse.py",
"chars": 12522,
"preview": "\"\"\"Derivations of standard content blocks from Amazon (Bedrock Converse) content.\"\"\"\n\nimport base64\nfrom collections.abc"
},
{
"path": "libs/core/langchain_core/messages/block_translators/google_genai.py",
"chars": 23154,
"preview": "\"\"\"Derivations of standard content blocks from Google (GenAI) content.\"\"\"\n\nimport base64\nimport re\nfrom collections.abc "
},
{
"path": "libs/core/langchain_core/messages/block_translators/google_vertexai.py",
"chars": 632,
"preview": "\"\"\"Derivations of standard content blocks from Google (VertexAI) content.\"\"\"\n\nfrom langchain_core.messages.block_transla"
},
{
"path": "libs/core/langchain_core/messages/block_translators/groq.py",
"chars": 5622,
"preview": "\"\"\"Derivations of standard content blocks from Groq content.\"\"\"\n\nimport json\nimport re\nfrom typing import Any\n\nfrom lang"
},
{
"path": "libs/core/langchain_core/messages/block_translators/langchain_v0.py",
"chars": 11658,
"preview": "\"\"\"Derivations of standard content blocks from LangChain v0 multimodal content.\"\"\"\n\nfrom typing import Any, cast\n\nfrom l"
},
{
"path": "libs/core/langchain_core/messages/block_translators/openai.py",
"chars": 42116,
"preview": "\"\"\"Derivations of standard content blocks from OpenAI content.\"\"\"\n\nfrom __future__ import annotations\n\nimport json\nimpor"
},
{
"path": "libs/core/langchain_core/messages/chat.py",
"chars": 2204,
"preview": "\"\"\"Chat Message.\"\"\"\n\nfrom typing import Any, Literal\n\nfrom typing_extensions import override\n\nfrom langchain_core.messag"
},
{
"path": "libs/core/langchain_core/messages/content.py",
"chars": 42397,
"preview": "\"\"\"Standard, multimodal content blocks for Large Language Model I/O.\n\nThis module provides standardized data structures "
},
{
"path": "libs/core/langchain_core/messages/function.py",
"chars": 2094,
"preview": "\"\"\"Function Message.\"\"\"\n\nfrom typing import Any, Literal\n\nfrom typing_extensions import override\n\nfrom langchain_core.me"
},
{
"path": "libs/core/langchain_core/messages/human.py",
"chars": 2130,
"preview": "\"\"\"Human message.\"\"\"\n\nfrom typing import Any, Literal, cast, overload\n\nfrom langchain_core.messages import content as ty"
},
{
"path": "libs/core/langchain_core/messages/modifier.py",
"chars": 875,
"preview": "\"\"\"Message responsible for deleting other messages.\"\"\"\n\nfrom typing import Any, Literal\n\nfrom langchain_core.messages.ba"
},
{
"path": "libs/core/langchain_core/messages/system.py",
"chars": 2140,
"preview": "\"\"\"System message.\"\"\"\n\nfrom typing import Any, Literal, cast, overload\n\nfrom langchain_core.messages import content as t"
},
{
"path": "libs/core/langchain_core/messages/tool.py",
"chars": 13228,
"preview": "\"\"\"Messages for tools.\"\"\"\n\nimport json\nfrom typing import Any, Literal, cast, overload\nfrom uuid import UUID\n\nfrom pydan"
},
{
"path": "libs/core/langchain_core/messages/utils.py",
"chars": 91476,
"preview": "\"\"\"Module contains utility functions for working with messages.\n\nSome examples of what you can do with these functions i"
},
{
"path": "libs/core/langchain_core/output_parsers/__init__.py",
"chars": 3487,
"preview": "\"\"\"`OutputParser` classes parse the output of an LLM call into structured data.\n\n!!! tip \"Structured output\"\n\n Output"
},
{
"path": "libs/core/langchain_core/output_parsers/base.py",
"chars": 11221,
"preview": "\"\"\"Base parser for language model outputs.\"\"\"\n\nfrom __future__ import annotations\n\nimport contextlib\nfrom abc import ABC"
},
{
"path": "libs/core/langchain_core/output_parsers/format_instructions.py",
"chars": 1106,
"preview": "\"\"\"Format instructions.\"\"\"\n\nJSON_FORMAT_INSTRUCTIONS = \"\"\"STRICT OUTPUT FORMAT:\n- Return only the JSON value that confor"
},
{
"path": "libs/core/langchain_core/output_parsers/json.py",
"chars": 4658,
"preview": "\"\"\"Parser for JSON output.\"\"\"\n\nfrom __future__ import annotations\n\nimport json\nfrom json import JSONDecodeError\nfrom typ"
},
{
"path": "libs/core/langchain_core/output_parsers/list.py",
"chars": 7255,
"preview": "\"\"\"Parsers for list output.\"\"\"\n\nfrom __future__ import annotations\n\nimport csv\nimport re\nfrom abc import abstractmethod\n"
},
{
"path": "libs/core/langchain_core/output_parsers/openai_functions.py",
"chars": 10604,
"preview": "\"\"\"Parsers for OpenAI functions output.\"\"\"\n\nimport copy\nimport json\nfrom types import GenericAlias\nfrom typing import An"
},
{
"path": "libs/core/langchain_core/output_parsers/openai_tools.py",
"chars": 13195,
"preview": "\"\"\"Parse tools for OpenAI tools output.\"\"\"\n\nimport copy\nimport json\nimport logging\nfrom json import JSONDecodeError\nfrom"
},
{
"path": "libs/core/langchain_core/output_parsers/pydantic.py",
"chars": 4776,
"preview": "\"\"\"Output parsers using Pydantic.\"\"\"\n\nimport json\nfrom typing import Annotated, Generic, Literal, overload\n\nimport pydan"
},
{
"path": "libs/core/langchain_core/output_parsers/string.py",
"chars": 1890,
"preview": "\"\"\"String output parser.\"\"\"\n\nfrom typing_extensions import override\n\nfrom langchain_core.output_parsers.transform import"
},
{
"path": "libs/core/langchain_core/output_parsers/transform.py",
"chars": 5835,
"preview": "\"\"\"Base classes for output parsers that can handle streaming input.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing "
},
{
"path": "libs/core/langchain_core/output_parsers/xml.py",
"chars": 11019,
"preview": "\"\"\"Output parser for XML format.\"\"\"\n\nimport contextlib\nimport re\nimport xml\nimport xml.etree.ElementTree as ET\nfrom coll"
},
{
"path": "libs/core/langchain_core/outputs/__init__.py",
"chars": 2117,
"preview": "\"\"\"Output classes.\n\nUsed to represent the output of a language model call and the output of a chat.\n\nThe top container f"
},
{
"path": "libs/core/langchain_core/outputs/chat_generation.py",
"chars": 5293,
"preview": "\"\"\"Chat generation output classes.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Literal\n\nfr"
},
{
"path": "libs/core/langchain_core/outputs/chat_result.py",
"chars": 1349,
"preview": "\"\"\"Chat result schema.\"\"\"\n\nfrom pydantic import BaseModel\n\nfrom langchain_core.outputs.chat_generation import ChatGenera"
},
{
"path": "libs/core/langchain_core/outputs/generation.py",
"chars": 2572,
"preview": "\"\"\"Generation output schema.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import Any, Literal\n\nfrom langchain_cor"
},
{
"path": "libs/core/langchain_core/outputs/llm_result.py",
"chars": 3952,
"preview": "\"\"\"`LLMResult` class.\"\"\"\n\nfrom __future__ import annotations\n\nfrom copy import deepcopy\nfrom typing import Literal\n\nfrom"
},
{
"path": "libs/core/langchain_core/outputs/run_info.py",
"chars": 618,
"preview": "\"\"\"`RunInfo` class.\"\"\"\n\nfrom __future__ import annotations\n\nfrom uuid import UUID\n\nfrom pydantic import BaseModel\n\n\nclas"
},
{
"path": "libs/core/langchain_core/prompt_values.py",
"chars": 4512,
"preview": "\"\"\"**Prompt values** for language model prompts.\n\nPrompt values are used to represent different pieces of prompts. They "
},
{
"path": "libs/core/langchain_core/prompts/__init__.py",
"chars": 3031,
"preview": "\"\"\"A prompt is the input to the model.\n\nPrompt is often constructed from multiple components and prompt values. Prompt c"
},
{
"path": "libs/core/langchain_core/prompts/base.py",
"chars": 16351,
"preview": "\"\"\"Base class for prompt templates.\"\"\"\n\nfrom __future__ import annotations\n\nimport builtins # noqa: TC003\nimport contex"
},
{
"path": "libs/core/langchain_core/prompts/chat.py",
"chars": 51442,
"preview": "\"\"\"Chat prompt template.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, abstractmethod\nfrom collections.ab"
},
{
"path": "libs/core/langchain_core/prompts/dict.py",
"chars": 5015,
"preview": "\"\"\"Dictionary prompt template.\"\"\"\n\nimport warnings\nfrom functools import cached_property\nfrom typing import Any, Literal"
},
{
"path": "libs/core/langchain_core/prompts/few_shot.py",
"chars": 16134,
"preview": "\"\"\"Prompt template that contains few shot examples.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHEC"
},
{
"path": "libs/core/langchain_core/prompts/few_shot_with_templates.py",
"chars": 8199,
"preview": "\"\"\"Prompt template that contains few shot examples.\"\"\"\n\nfrom pathlib import Path\nfrom typing import Any\n\nfrom pydantic i"
},
{
"path": "libs/core/langchain_core/prompts/image.py",
"chars": 4848,
"preview": "\"\"\"Image prompt template for a multimodal model.\"\"\"\n\nfrom typing import Any, Literal, cast\n\nfrom pydantic import Field\n\n"
},
{
"path": "libs/core/langchain_core/prompts/loading.py",
"chars": 10204,
"preview": "\"\"\"Load prompts.\"\"\"\n\nimport json\nimport logging\nfrom collections.abc import Callable\nfrom pathlib import Path\n\nimport ya"
},
{
"path": "libs/core/langchain_core/prompts/message.py",
"chars": 2652,
"preview": "\"\"\"Message prompt templates.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, abstractmethod\nfrom typing imp"
},
{
"path": "libs/core/langchain_core/prompts/prompt.py",
"chars": 10937,
"preview": "\"\"\"Prompt schema definition.\"\"\"\n\nfrom __future__ import annotations\n\nfrom pathlib import Path\nfrom typing import TYPE_CH"
},
{
"path": "libs/core/langchain_core/prompts/string.py",
"chars": 12378,
"preview": "\"\"\"`BasePrompt` schema definition.\"\"\"\n\nfrom __future__ import annotations\n\nimport warnings\nfrom abc import ABC, abstract"
},
{
"path": "libs/core/langchain_core/prompts/structured.py",
"chars": 6081,
"preview": "\"\"\"Structured prompt template for a language model.\"\"\"\n\nfrom collections.abc import AsyncIterator, Callable, Iterator, M"
},
{
"path": "libs/core/langchain_core/py.typed",
"chars": 0,
"preview": ""
},
{
"path": "libs/core/langchain_core/rate_limiters.py",
"chars": 9385,
"preview": "\"\"\"Interface for a rate limiter and an in-memory rate limiter.\"\"\"\n\nfrom __future__ import annotations\n\nimport abc\nimport"
},
{
"path": "libs/core/langchain_core/retrievers.py",
"chars": 11140,
"preview": "\"\"\"**Retriever** class returns `Document` objects given a text **query**.\n\nIt is more general than a vector store. A ret"
},
{
"path": "libs/core/langchain_core/runnables/__init__.py",
"chars": 3872,
"preview": "\"\"\"LangChain **Runnable** and the **LangChain Expression Language (LCEL)**.\n\nThe LangChain Expression Language (LCEL) of"
},
{
"path": "libs/core/langchain_core/runnables/base.py",
"chars": 222076,
"preview": "\"\"\"Base classes and utilities for `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport coll"
},
{
"path": "libs/core/langchain_core/runnables/branch.py",
"chars": 15777,
"preview": "\"\"\"Runnable that selects which branch to run based on a condition.\"\"\"\n\nfrom collections.abc import (\n AsyncIterator,\n"
},
{
"path": "libs/core/langchain_core/runnables/config.py",
"chars": 20247,
"preview": "\"\"\"Configuration utilities for `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\n\n# Cannot move"
},
{
"path": "libs/core/langchain_core/runnables/configurable.py",
"chars": 24053,
"preview": "\"\"\"`Runnable` objects that can be dynamically configured.\"\"\"\n\nfrom __future__ import annotations\n\nimport enum\nimport thr"
},
{
"path": "libs/core/langchain_core/runnables/fallbacks.py",
"chars": 24430,
"preview": "\"\"\"`Runnable` that can fallback to other `Runnable` objects if it fails.\"\"\"\n\nimport asyncio\nimport inspect\nimport typing"
},
{
"path": "libs/core/langchain_core/runnables/graph.py",
"chars": 23103,
"preview": "\"\"\"Graph used in `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nimport inspect\nfrom collections import defa"
},
{
"path": "libs/core/langchain_core/runnables/graph_ascii.py",
"chars": 10405,
"preview": "\"\"\"Draws DAG in ASCII.\n\nAdapted from https://github.com/iterative/dvc/blob/main/dvc/dagascii.py.\n\"\"\"\n\nfrom __future__ im"
},
{
"path": "libs/core/langchain_core/runnables/graph_mermaid.py",
"chars": 17538,
"preview": "\"\"\"Mermaid graph drawing utilities.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport base64\nimport random\ni"
},
{
"path": "libs/core/langchain_core/runnables/graph_png.py",
"chars": 6513,
"preview": "\"\"\"Helper class to draw a state graph into a PNG file.\"\"\"\n\nfrom itertools import groupby\nfrom typing import Any, cast\n\nf"
},
{
"path": "libs/core/langchain_core/runnables/history.py",
"chars": 24360,
"preview": "\"\"\"`Runnable` that manages chat message history for another `Runnable`.\"\"\"\n\nfrom __future__ import annotations\n\nimport i"
},
{
"path": "libs/core/langchain_core/runnables/passthrough.py",
"chars": 25971,
"preview": "\"\"\"Implementation of the `RunnablePassthrough`.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport inspect\nim"
},
{
"path": "libs/core/langchain_core/runnables/retry.py",
"chars": 13686,
"preview": "\"\"\"`Runnable` that retries a `Runnable` if it fails.\"\"\"\n\nfrom typing import (\n TYPE_CHECKING,\n Any,\n TypeVar,\n "
},
{
"path": "libs/core/langchain_core/runnables/router.py",
"chars": 7175,
"preview": "\"\"\"`Runnable` that routes to a set of `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nfrom collections.abc i"
},
{
"path": "libs/core/langchain_core/runnables/schema.py",
"chars": 5982,
"preview": "\"\"\"Module contains typedefs that are used with `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing i"
},
{
"path": "libs/core/langchain_core/runnables/utils.py",
"chars": 22342,
"preview": "\"\"\"Utility code for `Runnable` objects.\"\"\"\n\nfrom __future__ import annotations\n\nimport ast\nimport asyncio\nimport inspect"
},
{
"path": "libs/core/langchain_core/stores.py",
"chars": 9214,
"preview": "\"\"\"**Store** implements the key-value stores and storage helpers.\n\nModule provides implementations of various key-value "
},
{
"path": "libs/core/langchain_core/structured_query.py",
"chars": 5129,
"preview": "\"\"\"Internal representation of a structured query language.\"\"\"\n\nfrom __future__ import annotations\n\nfrom abc import ABC, "
},
{
"path": "libs/core/langchain_core/sys_info.py",
"chars": 3833,
"preview": "\"\"\"Print information about the system and langchain packages for debugging purposes.\"\"\"\n\nimport pkgutil\nimport platform\n"
},
{
"path": "libs/core/langchain_core/tools/__init__.py",
"chars": 2509,
"preview": "\"\"\"Tools are classes that an Agent uses to interact with the world.\n\nEach tool has a description. Agent uses the descrip"
},
{
"path": "libs/core/langchain_core/tools/base.py",
"chars": 55411,
"preview": "\"\"\"Base classes and utilities for LangChain tools.\"\"\"\n\nfrom __future__ import annotations\n\nimport functools\nimport inspe"
},
{
"path": "libs/core/langchain_core/tools/convert.py",
"chars": 17033,
"preview": "\"\"\"Convert functions and runnables to tools.\"\"\"\n\nimport inspect\nfrom collections.abc import Callable\nfrom typing import "
},
{
"path": "libs/core/langchain_core/tools/render.py",
"chars": 1817,
"preview": "\"\"\"Utilities to render tools.\"\"\"\n\nfrom __future__ import annotations\n\nfrom collections.abc import Callable\nfrom inspect "
},
{
"path": "libs/core/langchain_core/tools/retriever.py",
"chars": 3211,
"preview": "\"\"\"Retriever tool.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Literal\n\nfrom pydantic impo"
},
{
"path": "libs/core/langchain_core/tools/simple.py",
"chars": 6744,
"preview": "\"\"\"Tool that takes in function or coroutine directly.\"\"\"\n\nfrom __future__ import annotations\n\nfrom collections.abc impor"
},
{
"path": "libs/core/langchain_core/tools/structured.py",
"chars": 9742,
"preview": "\"\"\"Structured tool.\"\"\"\n\nfrom __future__ import annotations\n\nimport functools\nimport textwrap\nfrom collections.abc import"
},
{
"path": "libs/core/langchain_core/tracers/__init__.py",
"chars": 1358,
"preview": "\"\"\"Tracers are classes for tracing runs.\"\"\"\n\nfrom typing import TYPE_CHECKING\n\nfrom langchain_core._import_utils import "
},
{
"path": "libs/core/langchain_core/tracers/_compat.py",
"chars": 2824,
"preview": "\"\"\"Compatibility helpers for Pydantic v1/v2 with langsmith `Run` objects.\n\n!!! note\n\n The generic helpers (`pydantic_"
},
{
"path": "libs/core/langchain_core/tracers/_streaming.py",
"chars": 986,
"preview": "\"\"\"Internal tracers used for `stream_log` and `astream` events implementations.\"\"\"\n\nimport typing\nfrom collections.abc i"
},
{
"path": "libs/core/langchain_core/tracers/base.py",
"chars": 26371,
"preview": "\"\"\"Base interfaces for tracing runs.\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport logging\nfrom abc impo"
},
{
"path": "libs/core/langchain_core/tracers/context.py",
"chars": 6226,
"preview": "\"\"\"Context management for tracers.\"\"\"\n\nfrom __future__ import annotations\n\nfrom contextlib import contextmanager\nfrom co"
}
]
// ... and 2545 more files (download for full content)
About this extraction
This page contains the full source code of the langchain-ai/langchain GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 2,745 files (12.1 MB, approximately 3.3M tokens) and a symbol index with 12,145 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.