[
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n#   For a library or package, you might want to ignore these files since the code is\n#   intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# poetry\n#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.\n#   This is especially recommended for binary packages to ensure reproducibility, and is more\n#   commonly ignored for libraries.\n#   
https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control\n#poetry.lock\n\n# pdm\n#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.\n#pdm.lock\n#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it\n#   in version control.\n#   https://pdm.fming.dev/#use-with-ide\n.pdm.toml\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n\n# PyCharm\n#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can\n#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore\n#  and can be added to the global gitignore or merged into this file.  For a more nuclear\n#  option (not recommended) you can uncomment the following to ignore the entire idea folder.\n#.idea/\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "content": "default_language_version:\n  python: python3.9\ndefault_stages: [commit]\n\nrepos:\n  # Common pre-commit hooks\n  - repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: v4.4.0\n    hooks:\n      - id: check-added-large-files\n        args: ['--maxkb=20000']\n      - id: check-ast\n      - id: check-builtin-literals\n      - id: check-case-conflict\n      - id: check-docstring-first\n      - id: check-executables-have-shebangs\n      - id: check-shebang-scripts-are-executable\n      - id: check-symlinks\n      - id: check-json\n      - id: check-toml\n      - id: check-yaml\n      - id: detect-private-key\n      - id: end-of-file-fixer\n      - id: pretty-format-json\n        args: ['--autofix']\n      - id: requirements-txt-fixer\n      - id: trailing-whitespace\n\n  - repo: https://github.com/Lucas-C/pre-commit-hooks\n    rev: v1.4.2\n    hooks:\n      - id: forbid-crlf\n        name: CRLF end-lines checker\n        description: 'Forbid files containing CRLF end-lines to be committed'\n        entry: forbid_crlf\n        language: python\n      - id: forbid-tabs\n        name: No-tabs checker\n        description: 'Forbid files containing tabs to be committed'\n        entry: forbid_tabs\n        language: python\n\n  - repo: https://github.com/hadialqattan/pycln\n    rev: v2.1.3\n    hooks:\n      - id: pycln\n        name: pycln\n        description: 'A formatter for finding and removing unused import statements.'\n        entry: pycln\n        language: python\n        language_version: python3\n        types: [python]\n\n  #- repo: local\n  #  hooks:\n  #    - id: jupyter-nb-clear-output\n  #      name: jupyter-nb-clear-output\n  #      description: 'Clear jupyter notebook cell-outputs'\n  #      entry: jupyter nbconvert --ClearOutputPreprocessor.enabled=True --inplace\n  #      files: \\.ipynb$\n  #      stages: [commit]\n  #      language: system\n\n  - repo: https://github.com/mwouts/jupytext\n    rev: v1.14.4\n    hooks:\n      - 
id: jupytext\n        name: jupytext\n        description: 'Runs jupytext on all notebooks and paired files.'\n        language: python\n        entry: jupytext --pre-commit-mode\n        require_serial: true\n        args: [--sync, --pipe, black]\n        additional_dependencies:\n          - black==22.1.0\n\n  #- repo: https://github.com/psf/black\n  #  rev: 23.1.0\n  #  hooks:\n  #    - id: black\n  #      name: black\n  #      description: 'Black: The uncompromising Python code formatter'\n  #      entry: black\n  #      language: python\n  #      minimum_pre_commit_version: 2.9.2\n  #      require_serial: true\n  #      types_or: [python, pyi]\n  #      args:\n  #        - '--line-length=120'\n\n  - repo: https://github.com/timothycrosley/isort\n    rev: 5.12.0\n    hooks:\n      - id: isort\n        name: isort\n        description: 'Sorts import statements'\n        entry: isort\n        require_serial: true\n        language: python\n        language_version: python3\n        types_or: [cython, pyi, python]\n        minimum_pre_commit_version: '2.9.2'\n        files: \"\\\\.(py)$\"\n        args:\n          - '--filter-files'\n          - '--profile=black'\n          - '--multi-line=3'\n          - '--line-length=120'\n          - '--trailing-comma'\n\n  - repo: https://github.com/PyCQA/flake8\n    rev: 6.0.0\n    hooks:\n      - id: flake8\n        name: flake8\n        description: '`flake8` is a command-line utility for enforcing style consistency across Python projects.'\n        entry: flake8\n        language: python\n        types: [python]\n        require_serial: true\n        exclude: ^scratch/\n        additional_dependencies:\n          [\n            'flake8-bugbear',\n            'flake8-comprehensions',\n            'flake8-mutable',\n            'flake8-print',\n            'flake8-simplify',\n          ]\n        args:\n          - '--max-line-length=120'\n          - '--max-complexity=12'\n          - 
'--ignore=E501,W503,E203,F405,F403,F401,C901'\n"
  },
  {
    "path": ".streamlit/config.toml",
    "content": "[server]\nmaxUploadSize = 256\n\n[browser]\ngatherUsageStats = false\n\n[theme]\nbase=\"dark\"\nprimaryColor=\"#39ff14\"\nbackgroundColor=\"#000000\"\ntextColor=\"#f5f5dc\"\nfont=\"serif\"\n"
  },
  {
    "path": "Dockerfile",
    "content": "# Currently tested & workong for Python 3.11\nFROM python:3.9-slim\n\n# Copy the current directory contents into the container at /app\nCOPY app /app\n\n# Copy and install the requirements\nCOPY ./requirements.txt /requirements.txt\n\n# Update default packages\nRUN apt-get -qq update\n\nRUN apt-get install -y -q \\\n    build-essential \\\n    curl\n\n# install gcc\nRUN apt-get -y install gcc\n\n# install rust\nRUN curl https://sh.rustup.rs -sSf | sh -s -- -y\n\nENV PATH=\"/root/.cargo/bin:${PATH}\"\n\n\n# Pip install the dependencies\nRUN pip install --upgrade pip\nRUN pip install --no-cache-dir -r /requirements.txt\n\n# Set the working directory to /app\nWORKDIR /app\n\n# Expose port 8501\nEXPOSE 8501\n\n# Run the app\nCMD streamlit run /app/01_❓_Ask.py\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2023 Benedict Neo\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# ClassGPT\n\n> ChatGPT for my lecture slides\n\n<img width=\"1251\" alt=\"SCR-20230307-isgj\" src=\"https://user-images.githubusercontent.com/49143413/223467346-473681e4-6203-4f31-a1f1-253829d4768a.png\">\n\nBuilt with [Streamlit](https://github.com/streamlit/streamlit), powered by [LlamaIndex](https://github.com/jerryjliu/gpt_index) and [LangChain](https://github.com/hwchase17/langchain).\n\nUses the latest [ChatGPT API](https://platform.openai.com/docs/guides/chat) from [OpenAI](https://openai.com/).\n\nInspired by [AthensGPT](http://athensgpt.com/)\n\n## App Demo\n\nhttps://user-images.githubusercontent.com/49143413/222878151-42354446-5234-41fa-ad36-002dd74a5408.mp4\n\n## How this works\n\n1. Parses pdf with [pypdf](https://pypi.org/project/pypdf/)\n2. Index Construction with LlamaIndex's `GPTSimpleVectorIndex`\n   - the `text-embedding-ada-002` model is used to create embeddings\n   - see [vector store index](https://gpt-index.readthedocs.io/en/latest/guides/index_guide.html#vector-store-index) page to learn more\n   - here's a [sample index](notebooks/index.json)\n3. indexes and files are stored on s3\n4. Query the index\n   - uses the latest ChatGPT model `gpt-3.5-turbo`\n\n## Usage\n\n### Configuration and secrets\n\n1. configure aws ([quickstart](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html))\n\n```bash\n    aws configure\n```\n\n2. create an s3 bucket with a unique name\n\n3. Change the bucket name in the codebase (look for `bucket_name = \"classgpt\"` to whatever you created.\n\n4. rename [.env.local.example] to `.env` and add your openai credentials\n\n### Locally\n\n1. create python env\n\n```bash\n    conda create -n classgpt python=3.9\n    conda activate classgpt\n```\n\n2. install dependencies\n\n```bash\n    pip install -r requirements.txt\n```\n\n3. 
run streamlit app\n\n```bash\n    cd app/\n    streamlit run app/01_❓_Ask.py\n```\n\n### Docker\n\nAlternative, you can use Docker\n\n```bash\n    docker compose up\n```\n\nThen open up a new tab and navigate to <http://localhost:8501/>\n\n## TODO\n\n- [ ] local mode for app (no s3)\n  - [ ] global variable use_s3 to toggle between local and s3 mode\n- [ ] deploy app to streamlit cloud\n  - [ ] have input box for openai key\n  - [ ] uses pyarrow local FS to store files\n- [ ] update code for new langchain update\n- [ ] Custom prompts and tweak settings\n  - [ ] create a settings page for tweaking model parameters and provide custom prompts [example](https://github.com/hayabhay/whisper-ui)\n- [ ] Add ability to query on multiple files\n  - [ ] Compose indices of multiple lectures and query on all of them\n  - [ ] loop through all existing index, create the ones that haven't been created, and compose them together\n  - references\n    - [Composability — LlamaIndex documentation](https://gpt-index.readthedocs.io/en/latest/how_to/composability.html)\n    - [gpt_index/ComposableIndices.ipynb](https://github.com/jerryjliu/gpt_index/blob/main/examples/composable_indices/ComposableIndices.ipynb)\n    - [Test Complex Queries over Multiple Documents](https://colab.research.google.com/drive/1IJAKd1HIe-LvFRQmd3BCDDIsq6CpOwBj?usp=sharing)\n\n## FAQ\n\n### Tokens\n\nTokens can be thought of as pieces of words. Before the API processes the prompts, the input is broken down into tokens. These tokens are not cut up exactly where the words start or end - tokens can include trailing spaces and even sub-words. 
Here are some helpful rules of thumb for understanding tokens in terms of lengths:\n\n- 1 token ~= 4 chars in English\n- 1 token ~= ¾ words\n- 100 tokens ~= 75 words\n- 1-2 sentence ~= 30 tokens\n- 1 paragraph ~= 100 tokens\n- 1,500 words ~= 2048 tokens\n\nTry the [OpenAI Tokenizer tool](https://platform.openai.com/tokenizer)\n\n[Source](https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them)\n\n### Embeddings\n\nAn embedding is a vector (list) of floating point numbers. The distance between two vectors measures their relatedness. Small distances suggest high relatedness and large distances suggest low relatedness.\n\nFor `text-embedding-ada-002`, cost is $0.0004 / 1k tokens or 3000 pages/dollar\n\n- [Embeddings - OpenAI API](https://platform.openai.com/docs/guides/embeddings/use-cases)\n- [What Are Word and Sentence Embeddings?](https://txt.cohere.ai/sentence-word-embeddings/)\n\n### Models\n\nFor `gpt-3.5-turbo` model (ChatGPTAPI) cost is `$0.002 / 1K tokens`\n\nFor `text-davinci-003` model, cost is `$0.02 / 1K tokens`\n\n- [Chat completion - OpenAI API](https://platform.openai.com/docs/guides/chat)\n\n## References\n\n### Streamlit\n\n- [Increase upload limit of st.file_uploader](https://docs.streamlit.io/knowledge-base/deploy/increase-file-uploader-limit-streamlit-cloud)\n- [st.cache_resource - Streamlit Docs](https://docs.streamlit.io/library/api-reference/performance/st.cache_resource)\n- [Session State](https://docs.streamlit.io/library/api-reference/session-state)\n- [hayabhay/whisper-ui: Streamlit UI for OpenAI's Whisper](https://github.com/hayabhay/whisper-ui)\n\n### Deployment\n\n- [Streamlit Deployment Guide (wiki) - 🚀 Deployment - Streamlit](https://discuss.streamlit.io/t/streamlit-deployment-guide-wiki/5099)\n- [How to Deploy a streamlit application to AWS? 
Part-3](https://www.youtube.com/watch?v=Jc5GI3v2jtE)\n\n### LlamaIndex\n\n- [LlamaIndex Usage Pattern](https://gpt-index.readthedocs.io/en/latest/guides/usage_pattern.html#)\n- [Saving index](https://gpt-index.readthedocs.io/en/latest/guides/usage_pattern.html#optional-save-the-index-for-future-use)\n\nLoading data\n\n- [PDF Loader](https://llamahub.ai/l/file-pdf)\n- [llama-hub github repo](https://github.com/emptycrown/llama-hub/tree/main)\n- [document class](https://github.com/jerryjliu/gpt_index/blob/f07050b84309d53842a3552d3546e765012d168c/gpt_index/readers/schema/base.py#L4)\n- [PDFReader class](https://github.com/emptycrown/llama-hub/blob/main/loader_hub/file/pdf/base.py)\n\nmultimodal\n\n- [llama_index/Multimodal.ipynb at main](https://github.com/jerryjliu/llama_index/blob/main/examples/multimodal/Multimodal.ipynb)\n\nChatGPT\n\n- [gpt_index/SimpleIndexDemo-ChatGPT.ipynb](https://github.com/jerryjliu/gpt_index/blob/main/examples/vector_indices/SimpleIndexDemo-ChatGPT.ipynb)\n\n### Langchain\n\n- [gpt_index/LangchainDemo.ipynb](https://github.com/jerryjliu/gpt_index/blob/main/examples/langchain_demo/LangchainDemo.ipynb)\n- [OpenAIChat](https://langchain.readthedocs.io/en/latest/modules/llms/integrations/openaichat.html)\n\n### Boto3\n\n- [boto3 file_upload does it check if file exists](https://stackoverflow.com/questions/44978426/boto3-file-upload-does-it-check-if-file-exists)\n- [Boto 3: Resource vs Client](https://www.learnaws.org/2021/02/24/boto3-resource-client/)\n- [Writing json to file in s3 bucket](https://stackoverflow.com/questions/46844263/writing-json-to-file-in-s3-bucket)\n\n### Docker stuff\n\n- [amazon web services - What is the best way to pass AWS credentials to a Docker container?](https://stackoverflow.com/questions/36354423/what-is-the-best-way-to-pass-aws-credentials-to-a-docker-container)\n- [docker-compose up failing due to: error: can't find Rust compiler · Issue #572 · 
acheong08/ChatGPT](https://github.com/acheong08/ChatGPT/issues/572)\n- [linux - When installing Rust toolchain in Docker, Bash `source` command doesn't work](https://stackoverflow.com/questions/49676490/when-installing-rust-toolchain-in-docker-bash-source-command-doesnt-work)\n- [software installation - How to install a package with apt without the \"Do you want to continue [Y/n]?\" prompt? - Ask Ubuntu](https://askubuntu.com/questions/523962/how-to-install-a-package-with-apt-without-the-do-you-want-to-continue-y-n-p)\n- [How to use sudo inside a docker container?](https://stackoverflow.com/questions/25845538/how-to-use-sudo-inside-a-docker-container)\n"
  },
  {
    "path": "app/01_❓_Ask.py",
    "content": "import streamlit as st\nfrom components.sidebar import sidebar\nfrom s3 import S3\nfrom utils import query_gpt, query_gpt_memory, show_pdf\n\nst.set_page_config(\n    page_title=\"ClassGPT\",\n    page_icon=\"🤖\",\n    layout=\"wide\",\n    initial_sidebar_state=\"expanded\",\n    menu_items={\n        \"Get Help\": \"https://twitter.com/benthecoder1\",\n        \"Report a bug\": \"https://github.com/benthecoder/ClassGPT/issues\",\n        \"About\": \"ClassGPT is a chatbot that answers questions about your pdf files\",\n    },\n)\n\n# Session states\n# --------------\nif \"chosen_class\" not in st.session_state:\n    st.session_state.chosen_class = \"--\"\n\nif \"chosen_pdf\" not in st.session_state:\n    st.session_state.chosen_pdf = \"--\"\n\nif \"memory\" not in st.session_state:\n    st.session_state.memory = \"\"\n\n\nsidebar()\n\nst.header(\"ClassGPT: ChatGPT for your lectures slides\")\n\nbucket_name = \"classgpt\"\ns3 = S3(bucket_name)\n\nall_classes = s3.list_files()\n\nchosen_class = st.selectbox(\n    \"Select a class\", list(all_classes.keys()) + [\"--\"], index=len(all_classes)\n)\n\nst.session_state.chosen_class = chosen_class\n\nif st.session_state.chosen_class != \"--\":\n    all_pdfs = all_classes[chosen_class]\n\n    chosen_pdf = st.selectbox(\n        \"Select a PDF file\", all_pdfs + [\"--\"], index=len(all_pdfs)\n    )\n\n    st.session_state.chosen_pdf = chosen_pdf\n\n    if st.session_state.chosen_pdf != \"--\":\n        col1, col2 = st.columns(2)\n\n        with col1:\n            st.subheader(\"Ask a question\")\n            st.markdown(\n                \"\"\"\n                Here are some prompts:\n                - `What is the main idea of this lecture in simple terms?`\n                - `Summarize the main points of slide 5`\n                - `Provide 5 practice questions on this lecture with answers`\n                \"\"\"\n            )\n            query = st.text_area(\"Enter your question\", max_chars=200)\n\n 
           if st.button(\"Ask\"):\n                if query == \"\":\n                    st.error(\"Please enter a question\")\n                with st.spinner(\"Generating answer...\"):\n                    # res = query_gpt_memory(chosen_class, chosen_pdf, query)\n                    res = query_gpt(chosen_class, chosen_pdf, query)\n                    st.markdown(res)\n\n                    # with st.expander(\"Memory\"):\n                    #      st.write(st.session_state.memory.replace(\"\\n\", \"\\n\\n\"))\n\n        with col2:\n            show_pdf(chosen_class, chosen_pdf)\n"
  },
  {
    "path": "app/components/sidebar.py",
    "content": "import os\n\nimport streamlit as st\n\n\ndef sidebar():\n    with st.sidebar:\n        st.markdown(\n            \"## How to use\\n\"\n            \"1. Add your files in 📁 Data page\\n\"\n            \"2. Ask a question on the ❓ Ask page\\n\"\n        )\n        api_key_input = st.text_input(\n            \"OpenAI API Key\",\n            type=\"password\",\n            placeholder=\"sk-xxx...\",\n            help=\"Get an API key here 👉 https://platform.openai.com/account/api-keys.\",\n            value=\"\",\n        )\n\n        if api_key_input:\n            os.environ[\"OPENAI_API_KEY\"] = api_key_input\n            st.success(\"API key set\")\n\n        st.markdown(\n            \"\"\"\n            ---\n            ## About\n\n            ClassGPT lets you ask questions about your class \\\n                lectures and get accurate answers\n\n            This tool is a work in progress.\n\n            Contributions are welcomed on [GitHub](https://github.com/benthecoder/ClassGPT)\n\n            Made with ♥️ by [Benedict Neo](https://benneo.super.site/)\n            \"\"\"\n        )\n"
  },
  {
    "path": "app/pages/02_📁_Data.py",
    "content": "import streamlit as st\nfrom components.sidebar import sidebar\nfrom s3 import S3\n\nsidebar()\nbucket_name = \"classgpt\"\ns3 = S3(bucket_name)\nall_classes = s3.list_files()\n\ntab1, tab2, tab3 = st.tabs([\"Upload data\", \"Add Class\", \"Delete\"])\n\nwith tab1:\n    st.subheader(\"Upload new lectures\")\n\n    chosen_class = st.selectbox(\n        \"Select a class\",\n        list(all_classes.keys()) + [\"--\"],\n        index=len(all_classes),\n    )\n\n    if chosen_class != \"--\":\n        with st.form(\"upload_pdf\"):\n            uploaded_files = st.file_uploader(\n                \"Upload a PDF file\", type=\"pdf\", accept_multiple_files=True\n            )\n\n            submit_button = st.form_submit_button(\"Upload\")\n\n            if submit_button:\n                if len(uploaded_files) == 0:\n                    st.error(\"Please upload at least one file\")\n                else:\n                    with st.spinner(f\"Uploading {len(uploaded_files)} files...\"):\n                        for uploaded_file in uploaded_files:\n                            s3.upload_files(\n                                uploaded_file, f\"{chosen_class}/{uploaded_file.name}\"\n                            )\n\n                        st.success(f\"{len(uploaded_files)} files uploaded\")\n\n\nwith tab2:\n    st.subheader(\"Add a new class\")\n\n    with st.form(\"add_class\"):\n        add_class = st.text_input(\"Enter a new class name\")\n\n        submit_button = st.form_submit_button(\"Add\")\n\n        if submit_button:\n            if add_class == \"\":\n                st.error(\"Please enter a class name\")\n            else:\n                s3.create_folder(add_class)\n                st.success(f\"Class {add_class} added\")\n\nwith tab3:\n    st.subheader(\"Delete a class or a PDF file\")\n\n    chosen_class = st.selectbox(\n        \"Select a class to delete\",\n        list(all_classes.keys()) + [\"--\"],\n        index=len(all_classes),\n   
 )\n\n    if chosen_class != \"--\":\n        all_pdfs = all_classes[chosen_class] + [\"all\"]\n\n        # Remove empty values\n        all_pdfs = [x for x in all_pdfs if x]\n\n        chosen_pdf = st.selectbox(\n            \"Select a PDF file or choose 'all' to delete the whole class\",\n            all_pdfs + [\"--\"],\n            index=len(all_pdfs),\n        )\n\n        if chosen_pdf != \"--\":\n            submit_button = st.button(\"Remove\")\n\n            if submit_button:\n                if chosen_pdf == \"all\":\n                    s3.remove_folder(chosen_class)\n                    st.success(f\"{chosen_class} removed\")\n                else:\n                    s3.remove_file(chosen_class, chosen_pdf)\n                    st.success(f\"{chosen_pdf} removed\")\n"
  },
  {
    "path": "app/s3.py",
    "content": "from collections import defaultdict\n\nimport boto3\nimport botocore\n\n\nclass S3:\n    \"\"\"\n    Class for interacting with S3\n\n    Args:\n        bucket_name (str): Name of the S3 bucket\n\n    Attributes:\n        bucket_name (str): Name of the S3 bucket\n        s3 (boto3.resource): S3 resource\n        bucket (boto3.Bucket): S3 bucket\n\n    Methods:\n        list_folders: List all folders in the S3 bucket\n        list_files: List all files in the S3 bucket\n        folder_exists: Check if a folder exists in the S3 bucket\n        file_exists: Check if a file exists in the S3 bucket\n        create_folder: Create a folder in the S3 bucket\n        upload_files: Upload a file to the S3 bucket\n        remove_folder: Remove a folder from the S3 bucket\n        remove_file: Remove a file from the S3 bucket\n    \"\"\"\n\n    def __init__(self, bucket_name):\n        self.bucket_name = bucket_name\n        self.s3 = boto3.resource(\"s3\")\n        self.bucket = self.s3.Bucket(bucket_name)\n\n    def list_folders(self):\n        folders = set()\n        for obj in self.bucket.objects.filter():\n            folders.add(obj.key.split(\"/\")[0])\n\n        return folders\n\n    def list_files(self):\n        classes = defaultdict(list)\n\n        # loop through only the parent directory\n        for obj in self.bucket.objects.filter():\n            cname, fname = obj.key.split(\"/\")\n            if not fname.endswith(\".json\"):\n                classes[cname].append(fname)\n\n        return classes\n\n    def folder_exists(self, folder_name):\n        for _ in self.bucket.objects.filter(Prefix=f\"{folder_name}/\"):\n            return True\n        return False\n\n    def file_exists(self, folder_name, file_name):\n        try:\n            self.s3.Object(self.bucket_name, f\"{folder_name}/{file_name}\").load()\n            return True\n        except botocore.exceptions.ClientError as e:\n            if e.response[\"Error\"][\"Code\"] == 
\"404\":\n                return False\n            else:\n                raise\n\n    def create_folder(self, folder_name):\n        if not self.folder_exists(folder_name):\n            self.bucket.put_object(Key=f\"{folder_name}/\")\n\n    def upload_files(self, file_obj, file_path):\n        self.bucket.upload_fileobj(file_obj, file_path)\n\n    def remove_folder(self, folder_name):\n        if self.folder_exists(folder_name):\n            for key in self.bucket.objects.filter(Prefix=f\"{folder_name}/\"):\n                key.delete()\n\n    def remove_file(self, folder_name, file_name):\n        if self.folder_exists(folder_name):\n            self.bucket.objects.filter(Prefix=f\"{folder_name}/{file_name}\").delete(\n                Delete={\"Objects\": [{\"Key\": f\"{folder_name}/{file_name}\"}]}\n            )\n\n    def download_file(self, from_file_path, to_file_path):\n        self.bucket.download_file(from_file_path, to_file_path)\n"
  },
  {
    "path": "app/utils.py",
    "content": "import base64\nimport logging\nimport os\nimport sys\nimport tempfile\nfrom io import BytesIO\n\nimport openai\nimport streamlit as st\nfrom dotenv import load_dotenv\nfrom langchain import OpenAI\n\n# langchain\nfrom langchain.agents import Tool, initialize_agent\nfrom langchain.chains.conversation.memory import ConversationBufferMemory\nfrom langchain.chat_models import ChatOpenAI\n\n# llama_index\nfrom llama_index import Document, GPTSimpleVectorIndex, LLMPredictor\nfrom pypdf import PdfReader\nfrom s3 import S3\n\n# set to DEBUG for more verbose logging\nlogging.basicConfig(stream=sys.stdout, level=logging.INFO)\n\n\nload_dotenv()\nif os.getenv(\"OPENAI_API_KEY\") is None:\n    st.error(\"OpenAI API key not set\")\nelse:\n    openai.api_key = os.getenv(\"OPENAI_API_KEY\")\n\n\ns3 = S3(\"classgpt\")\n\n\n# ------------------- index creation ------------------- #\n\n\ndef parse_pdf(file: BytesIO):\n\n    pdf = PdfReader(file)\n    text_list = []\n\n    # Get the number of pages in the PDF document\n    num_pages = len(pdf.pages)\n\n    # Iterate over every page\n    for page in range(num_pages):\n        # Extract the text from the page\n        page_text = pdf.pages[page].extract_text()\n        text_list.append(page_text)\n\n    text = \"\\n\".join(text_list)\n\n    return [Document(text)]\n\n\ndef create_index(pdf_obj, folder_name, file_name):\n    \"\"\"\n    Create an index for a given PDF file and upload it to S3.\n    \"\"\"\n    index_name = file_name.replace(\".pdf\", \".json\")\n\n    logging.info(\"Generating new index...\")\n    documents = parse_pdf(pdf_obj)\n\n    logging.info(\"Creating index...\")\n    index = GPTSimpleVectorIndex(documents)\n\n    with tempfile.TemporaryDirectory() as tmp_dir:\n        tmp_path = f\"{tmp_dir}/{index_name}\"\n        logging.info(\"Saving index...\")\n        index.save_to_disk(tmp_path)\n\n        with open(tmp_path, \"rb\") as f:\n            logging.info(\"Uploading index to s3...\")\n           
 s3.upload_files(f, f\"{folder_name}/{index_name}\")\n\n    return index\n\n\n@st.cache_resource(show_spinner=False)\ndef get_index(folder_name, file_name):\n    \"\"\"\n    Get the index for a given PDF file.\n    \"\"\"\n    index_name = file_name.replace(\".pdf\", \".json\")\n    index = None\n\n    if s3.file_exists(folder_name, index_name):\n        logging.info(\"Index found, loading index...\")\n        with tempfile.TemporaryDirectory() as tmp_dir:\n            tmp_path = f\"{tmp_dir}/{index_name}\"\n            s3.download_file(f\"{folder_name}/{index_name}\", tmp_path)\n            index = GPTSimpleVectorIndex.load_from_disk(tmp_path)\n\n    else:\n        logging.info(\"Index not found, generating index...\")\n        with tempfile.NamedTemporaryFile(\"wb\") as f_src:\n            logging.info(f\"{file_name} downloaded\")\n            s3.download_file(f\"{folder_name}/{file_name}\", f_src.name)\n\n            with open(f_src.name, \"rb\") as f:\n                index = create_index(f, folder_name, file_name)\n\n    return index\n\n\ndef query_gpt(chosen_class, chosen_pdf, query):\n\n    if not os.getenv(\"OPENAI_API_KEY\"):\n        st.error(\"Enter your OpenAI API key in the sidebar.\")\n        st.stop()\n\n    # LLM Predictor (gpt-3.5-turbo)\n    llm_predictor = LLMPredictor(\n        llm=ChatOpenAI(\n            temperature=0,\n            model_name=\"gpt-3.5-turbo\",\n        )\n    )\n\n    index = get_index(chosen_class, chosen_pdf)\n    response = index.query(query, llm_predictor=llm_predictor)\n\n    # logging.info(response.get_formatted_sources())\n\n    return response\n\n\n@st.cache_resource\ndef create_tool(_index, chosen_pdf):\n    tools = [\n        Tool(\n            name=f\"{chosen_pdf} index\",\n            func=lambda q: str(_index.query(q)),\n            description=\"Useful to answering questions about the given file\",\n            return_direct=True,\n        ),\n    ]\n\n    return tools\n\n\n@st.cache_resource\ndef 
create_agent(chosen_class, chosen_pdf):\n    memory = ConversationBufferMemory(memory_key=\"chat_history\")\n    llm = OpenAI(temperature=0, model_name=\"gpt-3.5-turbo\")\n\n    index = get_index(chosen_class, chosen_pdf)\n    tools = create_tool(index, chosen_pdf)\n\n    agent = initialize_agent(\n        tools, llm, agent=\"conversational-react-description\", memory=memory\n    )\n\n    return agent\n\n\ndef query_gpt_memory(chosen_class, chosen_pdf, query):\n\n    agent = create_agent(chosen_class, chosen_pdf)\n    res = \"\"\n\n    try:\n        res = agent.run(input=query)\n    except Exception as e:\n        logging.error(e)\n        res = \"Something went wrong... Please try again.\"\n\n    st.session_state.memory = agent.memory.buffer\n\n    return res\n\n\n# ------------------- Render PDF ------------------- #\n\n\n@st.cache_data\ndef show_pdf(folder_name, file_name):\n\n    with tempfile.NamedTemporaryFile(\"wb\") as f_src:\n        logging.info(f\"Downloading {file_name}...\")\n        s3.download_file(f\"{folder_name}/{file_name}\", f_src.name)\n\n        with open(f_src.name, \"rb\") as f:\n            base64_pdf = base64.b64encode(f.read()).decode(\"utf-8\")\n\n        pdf_display = f\"\"\"\n        <iframe\n            src=\"data:application/pdf;base64,{base64_pdf}\"\n            width=\"100%\" height=\"1000\"\n            type=\"application/pdf\"\n            style=\"min-width: 400px;\"\n        >\n        </iframe>\n        \"\"\"\n\n        st.markdown(pdf_display, unsafe_allow_html=True)\n"
  },
  {
    "path": "docker-compose.yml",
    "content": "version: '1'\n\nservices:\n  classgpt:\n    image: benthecoder/classgpt\n    build: .\n    container_name: classgpt\n    env_file:\n      - .env\n    volumes:\n      - ~/.aws/:/root/.aws:ro\n    restart: unless-stopped\n    ports:\n      - 8501:8501\n"
  },
  {
    "path": "notebooks/aws_s3.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# boto3 S3 operations\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import boto3\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## List buckets\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 34,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"classgpt\\n\",\n      \"hackathonfiles123\\n\",\n      \"kafka-stock-market-project-ben\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"def list_buckets():\\n\",\n    \"    s3 = boto3.resource('s3')\\n\",\n    \"    for bucket in s3.buckets.all():\\n\",\n    \"        print(bucket.name)\\n\",\n    \"\\n\",\n    \"list_buckets()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## list folders in bucket\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 26,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474'}\"\n      ]\n     },\n     \"execution_count\": 26,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def list_folders():\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"    folders = set()\\n\",\n    \"    for obj in bucket.objects.all():\\n\",\n    \"        # split the key by / and take the first element\\n\",\n    \"        folders.add(obj.key.split(\\\"/\\\")[0])\\n\",\n    \"\\n\",\n    \"    return folders\\n\",\n    \"\\n\",\n    \"list_folders()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": 
{},\n   \"source\": [\n    \"## List files\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 33,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"defaultdict(<class 'list'>,\\n\",\n      \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n      \"                         'lecture02-agents-2up.pdf',\\n\",\n      \"                         'lecture03-1-search-2up.pdf',\\n\",\n      \"                         'lecture03-2-informedSearch-2up.pdf',\\n\",\n      \"                         'lecture04-localSearch-2up.pdf',\\n\",\n      \"                         'lecture05-CSP-2up.pdf',\\n\",\n      \"                         'lecture06-game-2up.pdf'],\\n\",\n      \"             'CPRE419': [''],\\n\",\n      \"             'STAT474': ['']})\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from collections import defaultdict\\n\",\n    \"from pprint import pprint\\n\",\n    \"\\n\",\n    \"def list_files():\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"    classes = defaultdict(list)\\n\",\n    \"    \\n\",\n    \"    # loop through only the parent directory\\n\",\n    \"    for obj in bucket.objects.filter():\\n\",\n    \"        cname, fname = obj.key.split(\\\"/\\\")\\n\",\n    \"        if not fname.endswith(\\\".json\\\"):\\n\",\n    \"            classes[cname].append(fname)\\n\",\n    \"\\n\",\n    \"    return classes\\n\",\n    \"\\n\",\n    \"pprint(list_files())\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if folder exist\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 48,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 48,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n   
 }\n   ],\n   \"source\": [\n    \"def folder_exists(folder_name):\\n\",\n    \"    s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"    for _ in bucket.objects.filter(Prefix=f\\\"{folder_name}/\\\"):\\n\",\n    \"        return True\\n\",\n    \"    return False\\n\",\n    \"\\n\",\n    \"folder_exists(\\\"COMS472\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 12,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"False\"\n      ]\n     },\n     \"execution_count\": 12,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"folder_exists(\\\"test\\\")\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Check if file exist\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 18,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 18,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"import botocore\\n\",\n    \"\\n\",\n    \"def file_exists(folder_name, file_name):\\n\",\n    \"    s3 = boto3.resource('s3')\\n\",\n    \"\\n\",\n    \"    try:\\n\",\n    \"        s3.Object(\\\"classgpt\\\", f\\\"{folder_name}/{file_name}\\\").load()\\n\",\n    \"        return True\\n\",\n    \"    except botocore.exceptions.ClientError as e:\\n\",\n    \"        if e.response['Error']['Code'] == \\\"404\\\":\\n\",\n    \"            return False\\n\",\n    \"        else:\\n\",\n    \"            raise\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"folder_name = \\\"COMS472\\\"\\n\",\n    \"file_name = \\\"lecture01-intro-2up.pdf\\\"\\n\",\n    \"file_exists(folder_name, file_name)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 19,\n   
\"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"False\"\n      ]\n     },\n     \"execution_count\": 19,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"folder_name = \\\"COMS472\\\"\\n\",\n    \"file_name = \\\"random.pdf\\\"\\n\",\n    \"file_exists(folder_name, file_name)\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create folder\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 35,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474', 'test'}\"\n      ]\n     },\n     \"execution_count\": 35,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def create_folder(folder_name):\\n\",\n    \"    s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"    if not folder_exists(bucket, folder_name):\\n\",\n    \"        bucket.put_object(Key=f\\\"{folder_name}/\\\")\\n\",\n    \"    \\n\",\n    \"\\n\",\n    \"create_folder(\\\"test\\\")\\n\",\n    \"list_folders()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Adding files\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 46,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"defaultdict(list,\\n\",\n       \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n       \"              'lecture02-agents-2up.pdf',\\n\",\n       \"              'lecture03-1-search-2up.pdf',\\n\",\n       \"              'lecture03-2-informedSearch-2up.pdf',\\n\",\n       \"              'lecture04-localSearch-2up.pdf',\\n\",\n       \"              'lecture05-CSP-2up.pdf',\\n\",\n       
\"              'lecture06-game-2up.pdf',\\n\",\n       \"              'test.pdf'],\\n\",\n       \"             'CPRE419': [''],\\n\",\n       \"             'STAT474': ['']})\"\n      ]\n     },\n     \"execution_count\": 46,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def upload_files(file_obj, file_path):\\n\",\n    \"    s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"    bucket.upload_fileobj(file_obj, file_path)\\n\",\n    \"\\n\",\n    \"with open(\\\"pdfs/lecture01-intro-2up.pdf\\\", \\\"rb\\\") as f:\\n\",\n    \"    upload_files(f, \\\"COMS472/test.pdf\\\")\\n\",\n    \"\\n\",\n    \"list_files()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## remove folder\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 36,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474'}\"\n      ]\n     },\n     \"execution_count\": 36,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def remove_folder(folder_name):\\n\",\n    \"    s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"    if folder_exists(bucket, folder_name):\\n\",\n    \"        for key in bucket.objects.filter(Prefix=f\\\"{folder_name}/\\\"):\\n\",\n    \"            key.delete()\\n\",\n    \"\\n\",\n    \"remove_folder(\\\"test\\\")\\n\",\n    \"list_folders()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## remove files\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 47,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       
\"defaultdict(list,\\n\",\n       \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n       \"              'lecture02-agents-2up.pdf',\\n\",\n       \"              'lecture03-1-search-2up.pdf',\\n\",\n       \"              'lecture03-2-informedSearch-2up.pdf',\\n\",\n       \"              'lecture04-localSearch-2up.pdf',\\n\",\n       \"              'lecture05-CSP-2up.pdf',\\n\",\n       \"              'lecture06-game-2up.pdf'],\\n\",\n       \"             'CPRE419': [''],\\n\",\n       \"             'STAT474': ['']})\"\n      ]\n     },\n     \"execution_count\": 47,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"def remove_file(folder_name, file_name):\\n\",\n    \"    s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"    bucket = s3.Bucket(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"    if folder_exists(bucket, folder_name):\\n\",\n    \"        bucket.objects.filter(Prefix=f\\\"{folder_name}/{file_name}\\\").delete(\\n\",\n    \"            Delete={\\\"Objects\\\": [{\\\"Key\\\": f\\\"{folder_name}/{file_name}\\\"}]}\\n\",\n    \"        )\\n\",\n    \"\\n\",\n    \"remove_file(\\\"COMS472\\\", \\\"test.pdf\\\")\\n\",\n    \"list_files()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## Create a class\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 62,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# create a dataclass titled s3 that implements the methods above\\n\",\n    \"\\n\",\n    \"class S3:\\n\",\n    \"    def __init__(self, bucket_name):\\n\",\n    \"        self.bucket_name = bucket_name\\n\",\n    \"        self.s3 = boto3.resource(\\\"s3\\\")\\n\",\n    \"        self.bucket = self.s3.Bucket(bucket_name)\\n\",\n    \"\\n\",\n    \"    def list_folders(self):\\n\",\n    \"        folders = set()\\n\",\n    \"        for obj in 
self.bucket.objects.filter():\\n\",\n    \"            folders.add(obj.key.split(\\\"/\\\")[0])\\n\",\n    \"\\n\",\n    \"        return folders\\n\",\n    \"\\n\",\n    \"    def list_files(self):\\n\",\n    \"        classes = defaultdict(list)\\n\",\n    \"\\n\",\n    \"        # loop through only the parent directory\\n\",\n    \"        for obj in self.bucket.objects.filter():\\n\",\n    \"            cname, fname = obj.key.split(\\\"/\\\")\\n\",\n    \"            if not fname.endswith(\\\".json\\\"):\\n\",\n    \"                classes[cname].append(fname)\\n\",\n    \"\\n\",\n    \"        return classes\\n\",\n    \"\\n\",\n    \"    def folder_exists(self, folder_name):\\n\",\n    \"        for _ in self.bucket.objects.filter(Prefix=f\\\"{folder_name}/\\\"):\\n\",\n    \"            return True\\n\",\n    \"        return False\\n\",\n    \"\\n\",\n    \"    def file_exists(self, folder_name, file_name):\\n\",\n    \"        try:\\n\",\n    \"            self.s3.Object(self.bucket_name, f\\\"{folder_name}/{file_name}\\\").load()\\n\",\n    \"            return True\\n\",\n    \"        except botocore.exceptions.ClientError as e:\\n\",\n    \"            if e.response['Error']['Code'] == \\\"404\\\":\\n\",\n    \"                return False\\n\",\n    \"            else:\\n\",\n    \"                raise\\n\",\n    \"\\n\",\n    \"    def create_folder(self, folder_name):\\n\",\n    \"        if not self.folder_exists(folder_name):\\n\",\n    \"            self.bucket.put_object(Key=f\\\"{folder_name}/\\\")\\n\",\n    \"\\n\",\n    \"    def upload_files(self, file_obj, file_path):\\n\",\n    \"        self.bucket.upload_fileobj(file_obj, file_path)\\n\",\n    \"\\n\",\n    \"    def remove_folder(self, folder_name):\\n\",\n    \"        if self.folder_exists(folder_name):\\n\",\n    \"            for key in self.bucket.objects.filter(Prefix=f\\\"{folder_name}/\\\"):\\n\",\n    \"                key.delete()\\n\",\n    \"\\n\",\n    \"    def 
remove_file(self, folder_name, file_name):\\n\",\n    \"        if self.folder_exists(folder_name):\\n\",\n    \"            self.bucket.objects.filter(Prefix=f\\\"{folder_name}/{file_name}\\\").delete(\\n\",\n    \"                Delete={\\\"Objects\\\": [{\\\"Key\\\": f\\\"{folder_name}/{file_name}\\\"}]}\\n\",\n    \"            )\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 64,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474'}\"\n      ]\n     },\n     \"execution_count\": 64,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3 = S3(\\\"classgpt\\\")\\n\",\n    \"\\n\",\n    \"# test all methods\\n\",\n    \"s3.list_folders()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 56,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"defaultdict(list,\\n\",\n       \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n       \"              'lecture02-agents-2up.pdf',\\n\",\n       \"              'lecture03-1-search-2up.pdf',\\n\",\n       \"              'lecture03-2-informedSearch-2up.pdf',\\n\",\n       \"              'lecture04-localSearch-2up.pdf',\\n\",\n       \"              'lecture05-CSP-2up.pdf',\\n\",\n       \"              'lecture06-game-2up.pdf'],\\n\",\n       \"             'CPRE419': [''],\\n\",\n       \"             'STAT474': ['']})\"\n      ]\n     },\n     \"execution_count\": 56,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3.list_files()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 57,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 57,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n 
   }\n   ],\n   \"source\": [\n    \"s3.folder_exists(\\\"COMS472\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 58,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 58,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3.file_exists(\\\"COMS472\\\", \\\"lecture01-intro-2up.pdf\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 59,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474', 'test'}\"\n      ]\n     },\n     \"execution_count\": 59,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3.create_folder(\\\"test\\\")\\n\",\n    \"s3.list_folders()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 60,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"{'COMS472', 'CPRE419', 'STAT474'}\"\n      ]\n     },\n     \"execution_count\": 60,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3.remove_folder(\\\"test\\\")\\n\",\n    \"s3.list_folders()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 65,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"defaultdict(list,\\n\",\n       \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n       \"              'lecture02-agents-2up.pdf',\\n\",\n       \"              'lecture03-1-search-2up.pdf',\\n\",\n       \"              'lecture03-2-informedSearch-2up.pdf',\\n\",\n       \"              'lecture04-localSearch-2up.pdf',\\n\",\n       \"              'lecture05-CSP-2up.pdf',\\n\",\n       \"              'lecture06-game-2up.pdf',\\n\",\n       \"              
'test.pdf'],\\n\",\n       \"             'CPRE419': [''],\\n\",\n       \"             'STAT474': ['']})\"\n      ]\n     },\n     \"execution_count\": 65,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"with open(\\\"pdfs/lecture01-intro-2up.pdf\\\", \\\"rb\\\") as f:\\n\",\n    \"    s3.upload_files(f, \\\"COMS472/test.pdf\\\")\\n\",\n    \"s3.list_files()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 66,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"defaultdict(list,\\n\",\n       \"            {'COMS472': ['lecture01-intro-2up.pdf',\\n\",\n       \"              'lecture02-agents-2up.pdf',\\n\",\n       \"              'lecture03-1-search-2up.pdf',\\n\",\n       \"              'lecture03-2-informedSearch-2up.pdf',\\n\",\n       \"              'lecture04-localSearch-2up.pdf',\\n\",\n       \"              'lecture05-CSP-2up.pdf',\\n\",\n       \"              'lecture06-game-2up.pdf'],\\n\",\n       \"             'CPRE419': [''],\\n\",\n       \"             'STAT474': ['']})\"\n      ]\n     },\n     \"execution_count\": 66,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"s3.remove_file(\\\"COMS472\\\", \\\"test.pdf\\\")\\n\",\n    \"s3.list_files()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"base\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.9.10\"\n  },\n  \"orig_nbformat\": 4,\n  
\"vscode\": {\n   \"interpreter\": {\n    \"hash\": \"0f1e841692445df6c0f476977380d4c26cc40d52508098a18c340919add514d9\"\n   }\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "notebooks/chatgpt.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 14,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Note: you may need to restart the kernel to use updated packages.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"%pip install -Uq llama-index openai langchain\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## imports\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"True\"\n      ]\n     },\n     \"execution_count\": 1,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"import logging\\n\",\n    \"import sys\\n\",\n    \"\\n\",\n    \"logging.basicConfig(stream=sys.stdout, level=logging.INFO)\\n\",\n    \"logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\\n\",\n    \"\\n\",\n    \"from llama_index import GPTSimpleVectorIndex, download_loader, SimpleDirectoryReader, LLMPredictor\\n\",\n    \"from IPython.display import Markdown, display\\n\",\n    \"from langchain.llms import OpenAIChat\\n\",\n    \"from IPython.display import Markdown, display\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"from pathlib import Path\\n\",\n    \"import warnings\\n\",\n    \"warnings.filterwarnings('ignore')\\n\",\n    \"\\n\",\n    \"from dotenv import load_dotenv\\n\",\n    \"\\n\",\n    \"# load OPENAI API KEY\\n\",\n    \"load_dotenv()\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## data loader\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"PDFReader = download_loader(\\\"PDFReader\\\")\\n\",\n    
\"\\n\",\n    \"loader = PDFReader()\\n\",\n    \"documents = loader.load_data(file=Path('pdfs/lecture01-intro-2up.pdf'))\\n\",\n    \"#print(documents) \"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## manual construction\\n\",\n    \"\\n\",\n    \"source: https://github.com/emptycrown/llama-hub/blob/main/loader_hub/file/pdf/base.py\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pypdf import PdfReader\\n\",\n    \"import re\\n\",\n    \"from io import BytesIO\\n\",\n    \"from llama_index import Document\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def parse_pdf(file: BytesIO):\\n\",\n    \"\\n\",\n    \"    pdf = PdfReader(file)\\n\",\n    \"    text_list = []\\n\",\n    \"    \\n\",\n    \"    # Get the number of pages in the PDF document\\n\",\n    \"    num_pages = len(pdf.pages)\\n\",\n    \"\\n\",\n    \"    # Iterate over every page\\n\",\n    \"    for page in range(num_pages):\\n\",\n    \"        # Extract the text from the page\\n\",\n    \"        page_text = pdf.pages[page].extract_text()\\n\",\n    \"        text_list.append(page_text)\\n\",\n    \"\\n\",\n    \"    text = \\\"\\\\n\\\".join(text_list)\\n\",\n    \"\\n\",\n    \"    return [Document(text)]\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"with open('pdfs/lecture01-intro-2up.pdf', 'rb') as file:\\n\",\n    \"    manual_load = parse_pdf(file)\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## creating index\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:root:> [build_index_from_documents] Total LLM token usage: 0 tokens\\n\",\n      \"> [build_index_from_documents] Total LLM 
token usage: 0 tokens\\n\",\n      \"INFO:root:> [build_index_from_documents] Total embedding token usage: 1672 tokens\\n\",\n      \"> [build_index_from_documents] Total embedding token usage: 1672 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"index = GPTSimpleVectorIndex(documents)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"index.save_to_disk('index.json')\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# load from disk\\n\",\n    \"index = GPTSimpleVectorIndex.load_from_disk('index.json')\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"##  query chatgpt\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# LLM Predictor (gpt-3.5-turbo)\\n\",\n    \"llm_predictor = LLMPredictor(llm=OpenAIChat(temperature=0, model_name=\\\"gpt-3.5-turbo\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:root:> [query] Total LLM token usage: 1865 tokens\\n\",\n      \"> [query] Total LLM token usage: 1865 tokens\\n\",\n      \"INFO:root:> [query] Total embedding token usage: 9 tokens\\n\",\n      \"> [query] Total embedding token usage: 9 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"response = index.query(\\n\",\n    \"    \\\"Summarize this lecture in bullet points?\\\", \\n\",\n    \"    llm_predictor=llm_predictor,\\n\",\n    \"    similarity_top_k=3\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 8,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      
\"text/markdown\": [\n       \"<b>- Introduction to Artificial Intelligence (AI)\\n\",\n       \"- Definition of AI: creating machines that perform intelligent functions\\n\",\n       \"- Characteristics of intelligence: perception, action, reasoning, learning, communication, planning\\n\",\n       \"- Turing Test for measuring intelligent behavior\\n\",\n       \"- Acting rationally: designing rational agents to achieve the best outcome\\n\",\n       \"- Brief history of AI: early success, collapse, industry boom and bust, emergence of intelligent agents, deep learning\\n\",\n       \"- Strong AI and the concept of singularity\\n\",\n       \"- Examples of AI achievements: defeating human champions in chess, checkers, Jeopardy!, Go, and poker; proving mathematical conjectures; controlling spacecraft operations; driverless cars; progress in image and speech recognition, machine translation, and robotic scientists\\n\",\n       \"- AI continues to find applications in various fields.</b>\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"display(Markdown(f\\\"<b>{response}</b>\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:root:> [query] Total LLM token usage: 1976 tokens\\n\",\n      \"> [query] Total LLM token usage: 1976 tokens\\n\",\n      \"INFO:root:> [query] Total embedding token usage: 15 tokens\\n\",\n      \"> [query] Total embedding token usage: 15 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"response = index.query(\\n\",\n    \"    \\\"Give me 3 practice questions with answers based on the content of this lecture.\\\", \\n\",\n    \"    llm_predictor=llm_predictor,\\n\",\n    \"    similarity_top_k=5\\n\",\n    
\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"<b>\\n\",\n       \"\\n\",\n       \"1. What is the definition of AI?\\n\",\n       \"Answer: AI stands for Artificial Intelligence, which is the art of creating machines that perform functions that require intelligence when performed by humans. It is the study of the computations that make it possible to perceive, reason, and act.\\n\",\n       \"\\n\",\n       \"2. What are the four general characteristics of intelligence?\\n\",\n       \"Answer: The four general characteristics of intelligence are perception, action, reasoning, and learning. Perception involves the manipulation and interpretation of data provided by sensors, while action involves the control and use of effectors to accomplish a variety of tasks. Reasoning includes deductive (logical) inference and inductive inference, while learning involves adapting behavior to better cope with changing environments, discovery of patterns, learning to reason, plan, and act.\\n\",\n       \"\\n\",\n       \"3. What are some examples of what AI can do?\\n\",\n       \"Answer: AI has achieved many impressive feats, including defeating world champions in games like chess, checkers, and Go, as well as beating human champions on the game show Jeopardy! It has also been used for logistics planning and scheduling in the military, as well as controlling the operations of spacecraft and rovers on Mars. 
AI has also made great progress in image recognition, speech recognition, machine translation, and driverless cars.</b>\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"display(Markdown(f\\\"<b>{response}</b>\\\"))\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"base\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.9.10\"\n  },\n  \"orig_nbformat\": 4,\n  \"vscode\": {\n   \"interpreter\": {\n    \"hash\": \"0f1e841692445df6c0f476977380d4c26cc40d52508098a18c340919add514d9\"\n   }\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "notebooks/image_input.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"source: https://github.com/jerryjliu/llama_index/blob/main/examples/multimodal/Multimodal.ipynb\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"%pip install -Uqq llama-index langchain\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex\\n\",\n    \"from llama_index.readers.file.base import (\\n\",\n    \"    DEFAULT_FILE_EXTRACTOR, \\n\",\n    \"    ImageParser,\\n\",\n    \")\\n\",\n    \"from llama_index.response.notebook_utils import (\\n\",\n    \"    display_response, \\n\",\n    \"    display_image,\\n\",\n    \")\\n\",\n    \"from llama_index.indices.query.query_transform.base import (\\n\",\n    \"    ImageOutputQueryTransform,\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"image_parser = ImageParser(parse_text=True)\\n\",\n    \"file_extractor = DEFAULT_FILE_EXTRACTOR\\n\",\n    \"file_extractor.update(\\n\",\n    \"{\\n\",\n    \"    \\\".jpg\\\": image_parser,\\n\",\n    \"    \\\".png\\\": image_parser,\\n\",\n    \"    \\\".jpeg\\\": image_parser,\\n\",\n    \"})\\n\",\n    \"\\n\",\n    \"# NOTE: we add filename as metadata for all documents\\n\",\n    \"filename_fn = lambda filename: {'file_name': filename}\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Downloading (…)rocessor_config.json: 100%|██████████| 362/362 [00:00<00:00, 103kB/s]\\n\",\n      \"Could not find image processor 
class in the image processor config or the model config. Loading based on pattern matching with the model's feature extractor configuration.\\n\",\n      \"Downloading (…)okenizer_config.json: 100%|██████████| 536/536 [00:00<00:00, 146kB/s]\\n\",\n      \"Downloading (…)ncepiece.bpe.model\\\";: 100%|██████████| 1.30M/1.30M [00:00<00:00, 13.5MB/s]\\n\",\n      \"Downloading (…)/main/tokenizer.json: 100%|██████████| 4.02M/4.02M [00:00<00:00, 8.03MB/s]\\n\",\n      \"Downloading (…)in/added_tokens.json: 100%|██████████| 1.52k/1.52k [00:00<00:00, 428kB/s]\\n\",\n      \"Downloading (…)cial_tokens_map.json: 100%|██████████| 335/335 [00:00<00:00, 118kB/s]\\n\",\n      \"Downloading (…)lve/main/config.json: 100%|██████████| 4.74k/4.74k [00:00<00:00, 1.47MB/s]\\n\",\n      \"Downloading (…)\\\"pytorch_model.bin\\\";: 100%|██████████| 806M/806M [00:21<00:00, 37.1MB/s] \\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"img_reader = SimpleDirectoryReader(\\n\",\n    \"    input_dir='img', \\n\",\n    \"    file_extractor=file_extractor, \\n\",\n    \"    file_metadata=filename_fn,\\n\",\n    \")\\n\",\n    \"imgs = img_reader.load_data()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 18,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"('<s_menu><s_nm> IFO broadcast</s_nm><s_cnt> A</s_cnt><s_price> B<sep/><s_nm> '\\n\",\n      \" 'ml G</s_nm><s_cnt> 1</s_cnt><s_price> ml</s_price><sep/><s_nm> ml '\\n\",\n      \" 'M2</s_nm><s_cnt> 3</s_cnt><s_price> '\\n\",\n      \" 'm2</s_price></s_menu><s_sub_total><s_subtotal_price> node must be delivered '\\n\",\n      \" 'in the</s_subtotal_price></s_sub_total><s_total><s_total_price> they were '\\n\",\n      \" 'sent.</s_total_price></s_total>')\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from pprint import pprint  \\n\",\n    \"pprint(imgs[0].text)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   
\"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [build_index_from_documents] Total LLM token usage: 0 tokens\\n\",\n      \"INFO:llama_index.token_counter.token_counter:> [build_index_from_documents] Total embedding token usage: 184 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"imgs_index = GPTSimpleVectorIndex(imgs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total LLM token usage: 399 tokens\\n\",\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total embedding token usage: 14 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"img_response = imgs_index.query(\\n\",\n    \"    'Explain the messages sent in the given diagram about FIFO broadcast',\\n\",\n    \"    query_transform=ImageOutputQueryTransform(width=400)\\n\",\n    \")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"**`Final Response:`** The given diagram is an illustration of a FIFO (First In First Out) broadcast. It shows the order in which messages are sent and received. The diagram shows three messages, each with a name, count, and price. The first message is IFO broadcast with a count of A and a price of B. The second message is ml G with a count of 1 and a price of ml. The third message is ml M2 with a count of 3 and a price of m2. The diagram also shows a subtotal and a total, indicating the total cost of the messages sent. 
\\n\",\n       \"\\n\",\n       \"<img src=\\\"img/fifo.png\\\" width=\\\"400\\\" />\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"---\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"**`Source Node 1/1`**\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    },\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"**Document ID:** 7591b551-3524-4726-b769-98f1e66c10b6<br>**Similarity:** 0.8041960290914032<br>**Text:** file_name: img/fifo.png\\n\",\n       \"\\n\",\n       \"<s_menu><s_nm> IFO broadcast</s_nm><s_cnt> A</s_cnt><s_price> B<sep/><s_...<br>\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"display_response(img_response)\"\n   ]\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"base\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.9.10\"\n  },\n  \"orig_nbformat\": 4\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "notebooks/index.json",
    "content": "{\n  \"docstore\": {\n    \"docs\": {\n      \"eb0ac69b-4d62-46ab-9c9e-7d40136c9c0d\": {\n        \"__type__\": \"simple_dict\",\n        \"doc_id\": \"eb0ac69b-4d62-46ab-9c9e-7d40136c9c0d\",\n        \"embedding\": null,\n        \"embeddings_dict\": {\n          \"3e6bad2c-8631-4352-8c7b-77c2f207b8f0\": [\n            -0.005091537721455097,\n            -0.0019130054861307144,\n            0.006239341106265783,\n            -0.012197453528642654,\n            -0.006677533499896526,\n            0.0006552452687174082,\n            -0.012413280084729195,\n            0.019502844661474228,\n            -0.007652021944522858,\n            -0.015290963463485241,\n            -0.0034041686449199915,\n            0.03727908059954643,\n            -0.025205891579389572,\n            0.011589216068387032,\n            0.02396325394511223,\n            -0.0008306040544994175,\n            0.027835046872496605,\n            0.012354418635368347,\n            0.017501547932624817,\n            -0.007272691000252962,\n            -0.03283175081014633,\n            0.011896604672074318,\n            -0.003453220007941127,\n            -0.014152970165014267,\n            -0.012197453528642654,\n            -0.005945032462477684,\n            0.032046929001808167,\n            -0.028227457776665688,\n            0.00804443284869194,\n            -0.010725910775363445,\n            0.011236045509576797,\n            -0.022498251870274544,\n            -0.03063424862921238,\n            -0.01899271085858345,\n            -0.014597702771425247,\n            0.009470194578170776,\n            0.002962705912068486,\n            -0.03149755299091339,\n            -0.0005632738466374576,\n            -0.013034597970545292,\n            0.03662506118416786,\n            0.0313405878841877,\n            -0.010359660722315311,\n            -0.009005840867757797,\n            -0.0122759360820055,\n            0.0357094369828701,\n            -0.013812880031764507,\n         
   -0.018940389156341553,\n            0.010843634605407715,\n            0.001841063378378749,\n            0.007625861093401909,\n            0.04429016262292862,\n            -0.004767798352986574,\n            0.0017266100039705634,\n            -0.018835745751857758,\n            -0.005258312448859215,\n            -0.016455117613077164,\n            0.01672980561852455,\n            0.0358140766620636,\n            -0.017553869634866714,\n            0.008548027835786343,\n            -0.006245880853384733,\n            -0.02971862070262432,\n            0.001044795266352594,\n            -0.02232820726931095,\n            -0.0036919370759278536,\n            -0.0034270593896508217,\n            0.03084353357553482,\n            -0.008626510389149189,\n            -0.013420469127595425,\n            0.02389785274863243,\n            0.022929904982447624,\n            0.010653968900442123,\n            0.012007787823677063,\n            0.02389785274863243,\n            0.014780828729271889,\n            -0.004856090992689133,\n            0.0006184566882438958,\n            -0.013865201734006405,\n            0.002962705912068486,\n            0.017763154581189156,\n            0.0027337991632521152,\n            0.00220731389708817,\n            0.011942386627197266,\n            0.016625162214040756,\n            0.01924123801290989,\n            0.009502895176410675,\n            0.01768467202782631,\n            -0.012805690988898277,\n            -0.008011732250452042,\n            -0.00016840988246258348,\n            0.0018018222181126475,\n            0.00929360929876566,\n            0.025166649371385574,\n            -0.009273989126086235,\n            0.03557863086462021,\n            -0.0014502870617434382,\n            0.0022857962176203728,\n            0.0006854936364106834,\n            -0.043740786612033844,\n            0.011419171467423439,\n            -0.01407448761165142,\n            -0.019568247720599174,\n            
-0.013551272451877594,\n            0.004620643798261881,\n            -0.005013055168092251,\n            -0.009424413554370403,\n            0.012897253967821598,\n            0.03929346054792404,\n            0.011203344911336899,\n            0.00759970024228096,\n            0.028724512085318565,\n            0.02104632928967476,\n            -0.03293639421463013,\n            -0.00017658511933404952,\n            -0.006196829490363598,\n            0.021608786657452583,\n            -0.027651920914649963,\n            -0.03042496182024479,\n            -0.013204642571508884,\n            0.00806405395269394,\n            0.026435445994138718,\n            0.02279910072684288,\n            -0.017043733969330788,\n            0.021648027002811432,\n            -0.003072254126891494,\n            -0.0076454817317426205,\n            -0.01612810790538788,\n            -0.013335446827113628,\n            -0.0234923604875803,\n            -0.0013979654759168625,\n            -0.013106539845466614,\n            0.020941687747836113,\n            0.0335380919277668,\n            -0.02506200596690178,\n            -0.0054316273890435696,\n            -0.03769765421748161,\n            0.023296155035495758,\n            0.002382264006882906,\n            -0.016926011070609093,\n            0.017370743677020073,\n            0.012792610563337803,\n            -0.0019669621251523495,\n            -0.011556515470147133,\n            -0.01497703418135643,\n            0.03510773926973343,\n            0.004077808000147343,\n            0.004800498951226473,\n            -0.003695207182317972,\n            0.02885531634092331,\n            0.004937842953950167,\n            -0.0008829255821183324,\n            -0.019790614023804665,\n            0.013374688103795052,\n            -0.00916934572160244,\n            -0.0013243884313851595,\n            -0.006952221505343914,\n            -0.009437493979930878,\n            0.01029425859451294,\n            
-0.0007778737926855683,\n            0.008639590814709663,\n            0.010222316719591618,\n            -0.022655216977000237,\n            0.023139191791415215,\n            0.0025833749677985907,\n            0.015304043889045715,\n            -0.013930603861808777,\n            0.0002509797632228583,\n            0.00346957053989172,\n            -0.02210584096610546,\n            -0.007717423606663942,\n            0.017593109980225563,\n            -0.0024820019025355577,\n            0.006697154138237238,\n            0.003613454755395651,\n            0.01466310489922762,\n            0.029640140011906624,\n            0.007979031652212143,\n            -0.032282374799251556,\n            0.006010434124618769,\n            0.008548027835786343,\n            -0.02035306952893734,\n            0.01567029394209385,\n            0.006353794131428003,\n            0.008410683833062649,\n            0.0019489765400066972,\n            0.020889366045594215,\n            -0.004826659802347422,\n            0.02031382918357849,\n            -0.02661857195198536,\n            0.008345281705260277,\n            0.0036003743298351765,\n            0.003973165061324835,\n            -0.0033681977074593306,\n            -0.6303696632385254,\n            -0.01463694404810667,\n            0.006141237914562225,\n            -0.012812231667339802,\n            0.00803789310157299,\n            0.02591223083436489,\n            0.010065351612865925,\n            -0.020052220672369003,\n            0.001958786742761731,\n            0.025389015674591064,\n            -0.0016146092675626278,\n            0.013034597970545292,\n            0.007697802968323231,\n            -0.011504193767905235,\n            -0.012903793714940548,\n            -0.017972441390156746,\n            -0.0015917186392471194,\n            -0.03597104176878929,\n            -0.0003517395816743374,\n            0.005202720873057842,\n            -0.001308855484239757,\n            
0.03246550261974335,\n            -0.03149755299091339,\n            -0.01829945109784603,\n            -0.01899271085858345,\n            -0.008750773966312408,\n            0.008207937702536583,\n            0.007841687649488449,\n            0.005601672455668449,\n            0.02143874205648899,\n            -0.03604952618479729,\n            0.024682676419615746,\n            0.010333499871194363,\n            0.009254368022084236,\n            0.0424850732088089,\n            -0.014558462426066399,\n            -0.015447927638888359,\n            0.0470108836889267,\n            0.006373414769768715,\n            0.03223005309700966,\n            -0.006275312043726444,\n            -0.006340713705867529,\n            0.00951597560197115,\n            -0.0014314840082079172,\n            0.005748826544731855,\n            -0.004587943200021982,\n            -5.042460543336347e-06,\n            0.007266150787472725,\n            -0.0019064652733504772,\n            -0.025088166818022728,\n            0.002722353907302022,\n            0.010451222769916058,\n            -0.014519221149384975,\n            -0.0026356964372098446,\n            0.0026209810748696327,\n            -0.004077808000147343,\n            0.027416475117206573,\n            -0.006680803839117289,\n            0.01340738870203495,\n            -0.01921507716178894,\n            0.006801797077059746,\n            0.023636246100068092,\n            -0.02773040346801281,\n            -0.010928656905889511,\n            -0.03105282038450241,\n            0.038534797728061676,\n            -0.016599001362919807,\n            -0.013433549553155899,\n            -0.022092759609222412,\n            -0.02545441873371601,\n            0.011445331387221813,\n            0.024957364425063133,\n            -0.010091512463986874,\n            -0.012799151241779327,\n            0.010222316719591618,\n            0.00318507244810462,\n            0.02992790751159191,\n            0.012792610563337803,\n   
         -0.01811632513999939,\n            0.0007950417930260301,\n            -0.0009834810625761747,\n            -0.031131302937865257,\n            0.004643534775823355,\n            -0.04196185618638992,\n            0.014009086415171623,\n            -0.009234747849404812,\n            0.009522516280412674,\n            -0.0036101846490055323,\n            -0.012753370217978954,\n            0.0083845229819417,\n            0.030895855277776718,\n            0.029875585809350014,\n            -0.025755267590284348,\n            -0.0469847209751606,\n            -0.013485871255397797,\n            0.015225561335682869,\n            -0.00115761358756572,\n            -0.020261507481336594,\n            -0.008469545282423496,\n            -0.004502920433878899,\n            0.024499550461769104,\n            -0.014270693995058537,\n            0.020496955141425133,\n            0.04363614693284035,\n            0.003973165061324835,\n            0.015696454793214798,\n            0.012001248076558113,\n            0.00693914107978344,\n            0.022942984476685524,\n            -0.026016874238848686,\n            -0.01608886569738388,\n            -0.011242586188018322,\n            0.00016217626398429275,\n            0.0380639024078846,\n            0.010601647198200226,\n            -0.00357748381793499,\n            0.008456464856863022,\n            0.001831253059208393,\n            0.021150972694158554,\n            -0.019097354263067245,\n            0.019097354263067245,\n            -0.01039890106767416,\n            0.021530304104089737,\n            -0.024041736498475075,\n            -0.002956165699288249,\n            0.0016121567459776998,\n            -0.01699141226708889,\n            -0.016428956761956215,\n            -0.018940389156341553,\n            -0.017279181629419327,\n            0.0083845229819417,\n            -0.03262246400117874,\n            -0.01161537691950798,\n            -0.0006597416358999908,\n            
0.023335397243499756,\n            -0.0067167747765779495,\n            0.0077239638194441795,\n            -0.0028956688474863768,\n            -0.012105890549719334,\n            0.00357748381793499,\n            -0.015238641761243343,\n            0.0025931852869689465,\n            0.025768347084522247,\n            -0.0013595419004559517,\n            -0.01785471849143505,\n            -0.015055516734719276,\n            -0.004372116643935442,\n            -0.013839040882885456,\n            -0.02281218208372593,\n            -0.0038194707594811916,\n            -0.02103324979543686,\n            0.00882271584123373,\n            0.0006290844758041203,\n            0.027154866605997086,\n            0.005376035813242197,\n            -0.024918122217059135,\n            -0.018508736044168472,\n            -0.025872990489006042,\n            -0.0004995070048607886,\n            -0.02640928514301777,\n            -0.02321767248213291,\n            0.022498251870274544,\n            -0.0031392909586429596,\n            0.01183120347559452,\n            0.0006691431626677513,\n            -0.004846280440688133,\n            0.008417223580181599,\n            0.033668894320726395,\n            -0.05451902002096176,\n            -0.05723974108695984,\n            0.006621941924095154,\n            -0.0009246193221770227,\n            0.020444633439183235,\n            0.026200000196695328,\n            -0.01589266024529934,\n            0.013152320869266987,\n            -0.011582675389945507,\n            -0.009215126745402813,\n            0.0058567398227751255,\n            -0.01553949061781168,\n            -0.013891362585127354,\n            0.014244533143937588,\n            0.010503544472157955,\n            -0.008894657716155052,\n            0.018377933651208878,\n            0.00237735896371305,\n            0.024277184158563614,\n            0.01039236132055521,\n            -0.02171343006193638,\n            0.006020244676619768,\n            
0.012118970975279808,\n            0.03934578225016594,\n            0.018731102347373962,\n            -0.0018377932719886303,\n            -0.029143085703253746,\n            0.041883375495672226,\n            -0.013041137717664242,\n            -0.0016309597995132208,\n            0.001916275592520833,\n            0.032046929001808167,\n            0.021517224609851837,\n            0.008541488088667393,\n            -0.0007067492697387934,\n            -0.004715476650744677,\n            0.014453819021582603,\n            -0.013034597970545292,\n            0.014597702771425247,\n            -0.02060159668326378,\n            0.027442635968327522,\n            -0.011563055217266083,\n            0.02079780213534832,\n            -0.004774338565766811,\n            0.009659860283136368,\n            -0.024643434211611748,\n            -0.000886195688508451,\n            0.041883375495672226,\n            -0.014087568037211895,\n            0.01809016428887844,\n            -0.05572241544723511,\n            -0.004780878778547049,\n            0.02841058373451233,\n            0.031131302937865257,\n            -0.003528432222083211,\n            0.006602321285754442,\n            -0.006723314989358187,\n            0.028933798894286156,\n            0.021412581205368042,\n            0.025323614478111267,\n            -0.023126110434532166,\n            -0.04444712772965431,\n            -0.007972490973770618,\n            0.008757313713431358,\n            -0.004502920433878899,\n            0.009633699432015419,\n            0.030948176980018616,\n            0.026108436286449432,\n            0.00458467286080122,\n            -0.019149675965309143,\n            0.02749495767056942,\n            0.017828557640314102,\n            -0.015565651468932629,\n            -0.012341338209807873,\n            0.028462905436754227,\n            -0.04656615108251572,\n            0.016690563410520554,\n            0.01484623085707426,\n            
0.036101847887039185,\n            0.016389714553952217,\n            -0.0167690459638834,\n            0.030346479266881943,\n            -0.014479979872703552,\n            -0.021634947508573532,\n            -0.0313405878841877,\n            0.007102645933628082,\n            -0.015173239633440971,\n            -0.015526410192251205,\n            0.02503584511578083,\n            -0.008646130561828613,\n            -0.006255691405385733,\n            0.03330264613032341,\n            0.016625162214040756,\n            -0.013865201734006405,\n            0.029169246554374695,\n            -0.008018272928893566,\n            -0.0024067896883934736,\n            -0.010876335203647614,\n            0.012249775230884552,\n            -0.005081727169454098,\n            -0.0070764850825071335,\n            0.007344632875174284,\n            0.00012038036220474169,\n            -0.002060159808024764,\n            0.015212480910122395,\n            -0.0031916124280542135,\n            0.036311130970716476,\n            0.013270044699311256,\n            0.005634373519569635,\n            -0.0005988361081108451,\n            0.03063424862921238,\n            0.02194887585937977,\n            -0.027992011979222298,\n            -0.043243732303380966,\n            -0.0010627808514982462,\n            0.025375936180353165,\n            0.00938517227768898,\n            1.3987319107400253e-06,\n            -0.019058112055063248,\n            0.0034139789640903473,\n            -0.009757963009178638,\n            -0.02037923038005829,\n            -0.010621268302202225,\n            0.018495656549930573,\n            -0.007959410548210144,\n            0.012995356693863869,\n            -0.01030079834163189,\n            -0.010889415629208088,\n            0.047978829592466354,\n            -0.0028106465470045805,\n            0.013956764712929726,\n            -0.014453819021582603,\n            0.021988118067383766,\n            -0.004803769290447235,\n            
-0.008979680016636848,\n            -0.014571542851626873,\n            0.0256898645311594,\n            -0.010104592889547348,\n            -0.008534947410225868,\n            0.011020219884812832,\n            0.0076127806678414345,\n            -0.029640140011906624,\n            0.008626510389149189,\n            0.004012406338006258,\n            0.01130144763737917,\n            0.010791312903165817,\n            0.016428956761956215,\n            -0.00563764339312911,\n            0.0012303731637075543,\n            -0.01107908133417368,\n            0.01610194705426693,\n            -0.009928007610142231,\n            -0.00870499201118946,\n            -0.03379970043897629,\n            0.013786720111966133,\n            0.02749495767056942,\n            0.060902245342731476,\n            0.007508137729018927,\n            -0.022419769316911697,\n            0.021072490140795708,\n            -0.023060709238052368,\n            -0.017815476283431053,\n            -0.01309345941990614,\n            -0.016023464500904083,\n            0.023361558094620705,\n            0.020627757534384727,\n            -0.0058600096963346004,\n            0.013551272451877594,\n            0.023060709238052368,\n            -0.010281178168952465,\n            0.02791352942585945,\n            0.002557214116677642,\n            0.01230209693312645,\n            -0.0234792809933424,\n            0.01406140811741352,\n            -0.005346605088561773,\n            0.011471492238342762,\n            0.012956115417182446,\n            0.006124887615442276,\n            0.020052220672369003,\n            0.009797204285860062,\n            -0.033224161714315414,\n            0.021844232454895973,\n            0.008796554990112782,\n            -0.023204592987895012,\n            -0.01807708479464054,\n            0.005650723818689585,\n            0.01674288511276245,\n            -0.001294957590289414,\n            -0.022707538679242134,\n            -0.0009090863750316203,\n     
       0.024080978706479073,\n            0.01746230572462082,\n            0.02439490705728531,\n            -0.0026209810748696327,\n            -0.012001248076558113,\n            0.01785471849143505,\n            0.030006390064954758,\n            -0.006075836252421141,\n            -0.024460308253765106,\n            -0.0060692960396409035,\n            -0.02253749407827854,\n            0.031000498682260513,\n            0.04149096459150314,\n            -0.02992790751159191,\n            -0.023361558094620705,\n            0.010209236294031143,\n            0.012884173542261124,\n            -0.02060159668326378,\n            0.0075800796039402485,\n            -0.00972526241093874,\n            0.008469545282423496,\n            0.008678831160068512,\n            -0.0009417873225174844,\n            -0.002908749273046851,\n            -0.012779530137777328,\n            0.006242610979825258,\n            -0.0016759235877543688,\n            0.02415946125984192,\n            -0.0015933536924421787,\n            -0.011752720922231674,\n            -0.026644732803106308,\n            0.005441437941044569,\n            0.01695217192173004,\n            -0.009332850575447083,\n            0.01584033854305744,\n            0.0035545930732041597,\n            -0.01497703418135643,\n            -0.03827318921685219,\n            -0.021818073466420174,\n            0.014702346175909042,\n            0.013368147425353527,\n            0.020614678040146828,\n            -0.019542086869478226,\n            -0.015931902453303337,\n            0.010529705323278904,\n            0.008031352423131466,\n            -0.05797224119305611,\n            0.001975137274712324,\n            -0.03288407251238823,\n            0.01632431335747242,\n            -0.00453235162422061,\n            0.01331582572311163,\n            -0.0075800796039402485,\n            -0.0029447204433381557,\n            -0.010915576480329037,\n            -0.001937531167641282,\n            
-0.0062491511926054955,\n            0.021281776949763298,\n            0.012047029100358486,\n            -0.0012540813768282533,\n            0.014401497319340706,\n            -0.011824662797152996,\n            0.013021517544984818,\n            0.0030640787445008755,\n            -0.040366049855947495,\n            -0.005601672455668449,\n            -0.0069326008670032024,\n            -0.018024763092398643,\n            -0.017815476283431053,\n            0.026304641738533974,\n            0.0025441336911171675,\n            0.005794608034193516,\n            0.0008714802679605782,\n            -0.02415946125984192,\n            -0.014257613569498062,\n            0.019175836816430092,\n            -0.0025604842230677605,\n            0.04742945358157158,\n            -0.010281178168952465,\n            -0.004365576431155205,\n            0.02529745362699032,\n            0.013564352877438068,\n            0.005065376870334148,\n            0.033616576343774796,\n            -0.012249775230884552,\n            -0.001862318953499198,\n            -0.025598302483558655,\n            0.007468896452337503,\n            -0.00023830815916880965,\n            -0.014270693995058537,\n            -0.007508137729018927,\n            0.005111158359795809,\n            -0.0071091861464083195,\n            -0.01629815250635147,\n            0.0007488516857847571,\n            -0.00592214148491621,\n            0.007606240455061197,\n            -0.007521218154579401,\n            -0.010039190761744976,\n            0.008443384431302547,\n            -0.009175886400043964,\n            -0.010084972716867924,\n            0.020169945433735847,\n            -0.021608786657452583,\n            0.0010251747444272041,\n            -0.010673589073121548,\n            -0.011700399219989777,\n            0.0022351099178195,\n            0.00869845226407051,\n            0.021137893199920654,\n            -0.03620649129152298,\n            9.289113222621381e-05,\n            
0.024852721020579338,\n            0.005689965095371008,\n            0.03780229762196541,\n            0.00037933100247755647,\n            0.018024763092398643,\n            -0.02194887585937977,\n            -0.018024763092398643,\n            -0.005536270327866077,\n            -0.034087467938661575,\n            -0.01164153777062893,\n            -0.0189665500074625,\n            0.03272710740566254,\n            0.01387828215956688,\n            0.0012704317923635244,\n            -0.024734996259212494,\n            0.017580030485987663,\n            0.01196854654699564,\n            0.005117698572576046,\n            -0.017933199182152748,\n            0.023335397243499756,\n            0.0024280454963445663,\n            -0.015212480910122395,\n            -0.005941762123256922,\n            0.02549365907907486,\n            0.014793909154832363,\n            0.004836470354348421,\n            -0.0011755990562960505,\n            0.0010423427447676659,\n            0.014741587452590466,\n            -0.007593160029500723,\n            -0.014296854846179485,\n            -0.03730524331331253,\n            -0.02812281623482704,\n            -0.0015271343290805817,\n            0.027207188308238983,\n            0.002503257477656007,\n            0.00681487750262022,\n            -0.040339890867471695,\n            0.008750773966312408,\n            0.024643434211611748,\n            0.007880928926169872,\n            0.009777583181858063,\n            0.0011061095865443349,\n            0.021281776949763298,\n            0.021634947508573532,\n            0.002032364020124078,\n            0.023753968998789787,\n            0.0016685659065842628,\n            -0.0028041063342243433,\n            -0.006795256864279509,\n            -0.025768347084522247,\n            0.020065302029252052,\n            0.00926090870052576,\n            -0.006867199204862118,\n            0.0011396280024200678,\n            0.02725951001048088,\n            
-0.00413012970238924,\n            0.007566999178379774,\n            -0.0041006989777088165,\n            0.006350524257868528,\n            0.005359685514122248,\n            0.007671642582863569,\n            -0.012903793714940548,\n            -0.0033780080266296864,\n            -0.029169246554374695,\n            -0.006350524257868528,\n            -0.01853489689528942,\n            0.014597702771425247,\n            0.009444033727049828,\n            0.013008437119424343,\n            0.03249166160821915,\n            -0.021595705300569534,\n            -0.0446564145386219,\n            0.03740988299250603,\n            -0.015461008064448833,\n            0.03304103761911392,\n            0.002691288013011217,\n            0.06880279630422592,\n            0.01632431335747242,\n            0.019372042268514633,\n            -0.0050294059328734875,\n            0.009836445562541485,\n            0.007351173087954521,\n            0.02391093410551548,\n            0.028907638043165207,\n            0.005474138539284468,\n            0.010843634605407715,\n            -0.011020219884812832,\n            -0.0035251621156930923,\n            0.005503569729626179,\n            -0.004077808000147343,\n            -0.050621066242456436,\n            0.010824013501405716,\n            0.009018921293318272,\n            0.005739016458392143,\n            -0.020300747826695442,\n            -0.01874418370425701,\n            -0.004793959204107523,\n            0.02838442288339138,\n            0.0018933848477900028,\n            -0.0049607339315116405,\n            -0.004355766344815493,\n            -0.016455117613077164,\n            -0.007364253513514996,\n            0.013943684287369251,\n            -0.007226909510791302,\n            0.021164054051041603,\n            0.006059485487639904,\n            -0.003417249070480466,\n            -0.00012293513282202184,\n            -0.01922815665602684,\n            0.004054917488247156,\n            
0.006386495195329189,\n            0.002920194761827588,\n            0.010196155868470669,\n            -0.019529005512595177,\n            0.018613379448652267,\n            0.010215776041150093,\n            -0.0050751869566738605,\n            0.0013742573792114854,\n            -0.001970232231542468,\n            0.01612810790538788,\n            0.014453819021582603,\n            -0.020928606390953064,\n            -0.014218372292816639,\n            -0.014820070005953312,\n            -0.004888791590929031,\n            0.0009589553228579462,\n            -0.021307937800884247,\n            -0.013355066999793053,\n            0.008436844684183598,\n            -0.015107838436961174,\n            -0.0029430852737277746,\n            0.020431552082300186,\n            0.02031382918357849,\n            -0.02477423846721649,\n            -0.0011371754808351398,\n            0.00033518471173010767,\n            -0.016664402559399605,\n            0.010026110336184502,\n            -0.01095481775701046,\n            0.007638941518962383,\n            -0.0122759360820055,\n            -0.013564352877438068,\n            0.012210533954203129,\n            -0.01746230572462082,\n            -0.012511382810771465,\n            0.025663703680038452,\n            0.00316218170337379,\n            -0.001379980007186532,\n            0.04405471682548523,\n            -0.020496955141425133,\n            -0.0003053451073355973,\n            0.014702346175909042,\n            0.0257421862334013,\n            -0.02392401359975338,\n            -0.005820768885314465,\n            -0.025859909132122993,\n            -0.00742311542853713,\n            0.02503584511578083,\n            -0.02078472264111042,\n            -0.006108537316322327,\n            -0.016232751309871674,\n            -0.013668996281921864,\n            0.010810933075845242,\n            -0.02545441873371601,\n            -0.013361607678234577,\n            -0.002606265479698777,\n            
0.01653360016644001,\n            -0.015696454793214798,\n            0.007089565508067608,\n            -0.025192810222506523,\n            -0.007867848500609398,\n            0.01635047420859337,\n            0.008364902809262276,\n            0.01488547120243311,\n            -0.015133998356759548,\n            0.03952890634536743,\n            -0.00541527708992362,\n            0.02684093825519085,\n            0.01309345941990614,\n            -0.020706240087747574,\n            -0.02410713955760002,\n            -0.01331582572311163,\n            0.004659885074943304,\n            -0.024015575647354126,\n            0.017514627426862717,\n            0.0010578756919130683,\n            0.013250424526631832,\n            -0.01565721444785595,\n            -0.00793979037553072,\n            -0.012472141534090042,\n            -0.0036069145426154137,\n            -0.01117718406021595,\n            0.012426360510289669,\n            -0.0033191463444381952,\n            0.004486570134758949,\n            -0.0004247035540174693,\n            -0.018888067454099655,\n            -0.005703045520931482,\n            -4.892368451692164e-05,\n            -0.042328108102083206,\n            0.021857313811779022,\n            0.0009360646363347769,\n            0.035421665757894516,\n            0.012812231667339802,\n            -0.0015990763204172254,\n            -0.032543983310461044,\n            -0.025859909132122993,\n            -0.03259630501270294,\n            -0.0031703568529337645,\n            0.00951597560197115,\n            0.007344632875174284,\n            0.013302745297551155,\n            0.0334596112370491,\n            0.000456178211607039,\n            0.022001197561621666,\n            -0.005081727169454098,\n            0.01700449362397194,\n            -0.01453230157494545,\n            -0.00827988050878048,\n            -0.023309236392378807,\n            -0.02882915548980236,\n            -0.027390314266085625,\n            
-0.006520568858832121,\n            -0.007096105720847845,\n            -0.020235346630215645,\n            -0.007854768075048923,\n            -0.028018172830343246,\n            -0.007004543207585812,\n            0.021085571497678757,\n            0.010281178168952465,\n            -0.01880958490073681,\n            0.023178432136774063,\n            -0.017370743677020073,\n            -0.0017952820053324103,\n            0.020653918385505676,\n            0.007998651824891567,\n            -0.008848876692354679,\n            -0.00837798323482275,\n            -0.01195546705275774,\n            -0.028750672936439514,\n            -0.011151023209095001,\n            -0.0063930354081094265,\n            -0.000860034953802824,\n            0.009208586998283863,\n            0.018508736044168472,\n            -0.010019570589065552,\n            -0.01792011968791485,\n            -0.010313878767192364,\n            -0.012897253967821598,\n            0.027651920914649963,\n            0.007874388247728348,\n            -0.00720728887245059,\n            0.01252446323633194,\n            0.0016464927466586232,\n            -0.008515327237546444,\n            -0.04206649959087372,\n            -0.01007189229130745,\n            0.020235346630215645,\n            0.01590574160218239,\n            0.0029839614871889353,\n            -0.010065351612865925,\n            0.02257673442363739,\n            -0.021870393306016922,\n            0.007410035002976656,\n            -0.0358140766620636,\n            -0.009156265296041965,\n            0.012445980682969093,\n            0.00894697941839695,\n            -0.02948317490518093,\n            0.018417173996567726,\n            0.016232751309871674,\n            0.007802446372807026,\n            -0.0048168497160077095,\n            -0.00948327500373125,\n            -0.013433549553155899,\n            -0.03623265027999878,\n            -0.03479380905628204,\n            -0.005542810540646315,\n            
-0.006808337289839983,\n            0.012164752930402756,\n            0.02974478155374527,\n            -0.02838442288339138,\n            -0.009450574405491352,\n            0.00206997012719512,\n            0.005575511604547501,\n            -0.02053619548678398,\n            -0.01083055417984724,\n            0.19746141135692596,\n            -0.011026759631931782,\n            0.011589216068387032,\n            0.013551272451877594,\n            -0.018940389156341553,\n            -0.0031768970657140017,\n            0.0380377434194088,\n            -0.01406140811741352,\n            -0.0190188717097044,\n            -0.01117718406021595,\n            -0.0038031202275305986,\n            0.008626510389149189,\n            -0.038560956716537476,\n            0.0009990140097215772,\n            0.011890064924955368,\n            -0.018443334847688675,\n            -0.026553170755505562,\n            -0.047717224806547165,\n            0.002261270536109805,\n            0.0106147276237607,\n            -0.015958063304424286,\n            -0.009581377729773521,\n            -0.021608786657452583,\n            -0.019411282613873482,\n            0.028802994638681412,\n            0.010156914591789246,\n            -0.013355066999793053,\n            0.00703070405870676,\n            0.02812281623482704,\n            -0.010601647198200226,\n            -0.01287109311670065,\n            -0.0014159510610625148,\n            0.005987543612718582,\n            0.007122266571968794,\n            -0.023374637588858604,\n            -0.0034205191768705845,\n            0.014022166840732098,\n            0.016455117613077164,\n            0.007619320880621672,\n            -0.03461068496108055,\n            0.003485921071842313,\n            0.028227457776665688,\n            0.011321067810058594,\n            -0.027207188308238983,\n            0.02438182756304741,\n            0.03280559182167053,\n            -0.003281540237367153,\n            -0.028462905436754227,\n  
          -0.030503444373607635,\n            0.023989414796233177,\n            -0.02328307554125786,\n            -0.003440139815211296,\n            -0.004735097289085388,\n            0.020444633439183235,\n            -0.011661157943308353,\n            -0.006880279630422592,\n            -0.004686045926064253,\n            0.01746230572462082,\n            0.02882915548980236,\n            -0.0007136982167139649,\n            -0.016677483916282654,\n            0.014362256042659283,\n            -0.000626223161816597,\n            0.026069195941090584,\n            -0.004117049276828766,\n            0.04360998421907425,\n            -0.023099949583411217,\n            0.0006805884768255055,\n            0.006756016053259373,\n            -0.008567648008465767,\n            0.0013407388469204307,\n            -0.03220389410853386,\n            -0.04423784092068672,\n            -0.008528407663106918,\n            -0.01017653476446867,\n            -0.032517824321985245,\n            0.030503444373607635,\n            0.039476584643125534,\n            0.0010088242124766111,\n            -0.0030444583389908075,\n            0.00014316884335130453,\n            0.015173239633440971,\n            -0.022223563864827156,\n            -0.0075800796039402485,\n            -0.013100000098347664,\n            -0.008430304005742073,\n            0.029561657458543777,\n            0.006046405527740717,\n            -0.004463679622858763,\n            0.0016873689601197839,\n            -0.018024763092398643,\n            -0.01969905197620392,\n            -0.0003376372915226966,\n            -0.009842985309660435,\n            0.005830578971654177,\n            -0.017946280539035797,\n            0.03693899139761925,\n            0.014689265750348568,\n            -0.00027979747392237186,\n            -0.0057913376949727535,\n            -0.017750075086951256,\n            0.0581292062997818,\n            0.011903145350515842,\n            0.026082275435328484,\n        
    0.004179181065410376,\n            9.876707918010652e-05,\n            0.004954193718731403,\n            0.013485871255397797,\n            0.006311282981187105,\n            -0.011092161759734154,\n            -0.008463005535304546,\n            -0.0491560660302639,\n            0.0035840237978845835,\n            0.007050324231386185,\n            0.0033780080266296864,\n            0.015801098197698593,\n            -0.028567548841238022,\n            -0.015369446016848087,\n            9.871598740573972e-05,\n            -0.009921467863023281,\n            -0.008090214803814888,\n            -0.00524523202329874,\n            -0.006514028646051884,\n            0.016389714553952217,\n            -0.020863205194473267,\n            -0.04151712357997894,\n            -0.022681377828121185,\n            0.01899271085858345,\n            0.003167086746543646,\n            -0.010326959192752838,\n            0.008803095668554306,\n            -0.039502743631601334,\n            -0.015173239633440971,\n            0.035918720066547394,\n            -0.00403202697634697,\n            0.0014265788486227393,\n            -0.00040528737008571625,\n            0.007632401306182146,\n            0.003816200653091073,\n            -0.009901846759021282,\n            -0.0019113704329356551,\n            0.011255666613578796,\n            0.009856065735220909,\n            0.024198701605200768,\n            0.003482650965452194,\n            -0.016036545857787132,\n            0.009130104444921017,\n            0.014440738596022129,\n            -0.021137893199920654,\n            -0.006912980228662491,\n            -0.010320419445633888,\n            0.02746879681944847,\n            0.022511333227157593,\n            0.0022514602169394493,\n            0.029587818309664726,\n            0.025598302483558655,\n            -0.013825960457324982,\n            0.01027463749051094,\n            -0.006052945274859667,\n            0.015788016840815544,\n            
-0.04073230177164078,\n            0.006520568858832121,\n            0.015304043889045715,\n            -0.0014609148493036628,\n            -0.0201568640768528,\n            -0.02098092809319496,\n            -0.16366170346736908,\n            0.0023871692828834057,\n            0.02661857195198536,\n            -0.03149755299091339,\n            0.02480039931833744,\n            0.029012281447649002,\n            0.019162755459547043,\n            -0.012923414818942547,\n            0.0023054168559610844,\n            -0.01945052482187748,\n            0.01835177280008793,\n            -0.008783474564552307,\n            -0.0019031951669603586,\n            -0.02591223083436489,\n            -0.013152320869266987,\n            0.0030869694892317057,\n            -0.02389785274863243,\n            0.045938290655612946,\n            0.024682676419615746,\n            -0.006769096478819847,\n            0.015434847213327885,\n            -0.024015575647354126,\n            0.018600299954414368,\n            -0.00020897950162179768,\n            0.021765751764178276,\n            6.254056643228978e-05,\n            -0.0033681977074593306,\n            0.002016013488173485,\n            -0.005343334749341011,\n            -0.04264203459024429,\n            0.005055566783994436,\n            -0.03937194123864174,\n            0.014035247266292572,\n            0.004708936437964439,\n            0.03471532464027405,\n            -0.015931902453303337,\n            0.016245830804109573,\n            -0.013243883848190308,\n            -0.04766490310430527,\n            0.01308691967278719,\n            0.019162755459547043,\n            0.021870393306016922,\n            0.014898551627993584,\n            0.0019130054861307144,\n            0.00324229896068573,\n            -0.009712181985378265,\n            0.009319770149886608,\n            -0.015382526442408562,\n            0.010320419445633888,\n            -0.021268697455525398,\n            
0.013682076707482338,\n            -0.019372042268514633,\n            0.02081088349223137,\n            -0.018600299954414368,\n            0.033198002725839615,\n            0.014898551627993584,\n            -0.012602945789694786,\n            -0.008757313713431358,\n            -0.00326518970541656,\n            -0.02192271500825882,\n            -0.004686045926064253,\n            -0.004990164656192064,\n            0.00452254107221961,\n            -0.0037900398019701242,\n            -0.007586619816720486,\n            -0.0037704193964600563,\n            -0.015238641761243343,\n            0.026448527351021767,\n            -0.02077164314687252,\n            0.02075856178998947,\n            -0.005689965095371008,\n            -0.007168047595769167,\n            -0.005853469483554363,\n            -0.025415176525712013,\n            -0.013028057292103767,\n            0.018678780645132065,\n            -0.014349175617098808,\n            -0.01240019965916872,\n            0.0074361953884363174,\n            0.008397603407502174,\n            -0.01208627037703991,\n            0.021726509556174278,\n            -0.024316424503922462,\n            -0.008986220695078373,\n            -0.007933249697089195,\n            0.003279905067756772,\n            0.015304043889045715,\n            -0.002293971600010991,\n            -0.006926060654222965,\n            0.0016759235877543688,\n            0.020510034635663033,\n            -0.028096655383706093,\n            -0.04633070155978203,\n            0.0019440713804215193,\n            -0.0009981964249163866,\n            0.001741325482726097,\n            0.016180429607629776,\n            -0.028279779478907585,\n            -0.0011216425336897373,\n            -0.02977094240486622,\n            0.009175886400043964,\n            -0.0018198078032582998,\n            0.014283774420619011,\n            -0.016808288171887398,\n            -0.004123589489609003,\n            0.006474787835031748,\n            
-0.00508499750867486,\n            -0.012249775230884552,\n            0.027390314266085625,\n            -0.01628507301211357,\n            0.026226161047816277,\n            -0.009522516280412674,\n            0.017540788277983665,\n            0.03890104964375496,\n            -0.02077164314687252,\n            0.011543435044586658,\n            -0.023361558094620705,\n            -0.013171941973268986,\n            0.00045290813432075083,\n            -0.0067167747765779495,\n            0.01655976101756096,\n            0.006141237914562225,\n            -0.01967289112508297,\n            0.03419211134314537,\n            -0.02232820726931095,\n            -0.025990713387727737,\n            -0.09674248844385147,\n            -0.03680818900465965,\n            -0.00026957844966091216,\n            0.022511333227157593,\n            -0.011994707398116589,\n            0.03288407251238823,\n            -0.009757963009178638,\n            0.005686694756150246,\n            -0.03037264011800289,\n            0.025807587429881096,\n            -0.01768467202782631,\n            -0.01812940649688244,\n            -0.01365591585636139,\n            -0.018914228305220604,\n            0.01038582157343626,\n            -0.013760559260845184,\n            -0.009476734325289726,\n            -0.03649425879120827,\n            -0.01746230572462082,\n            0.01879650540649891,\n            0.011046379804611206,\n            0.004872441291809082,\n            0.017082976177334785,\n            -0.024251023307442665,\n            -0.002027458744123578,\n            -0.009869146160781384,\n            -0.035892561078071594,\n            0.021752670407295227,\n            0.01964673027396202,\n            -0.002217124216258526,\n            -0.010686669498682022,\n            -0.011713479645550251,\n            0.007272691000252962,\n            -0.029849424958229065,\n            0.01161537691950798,\n            -0.0009916562121361494,\n            
0.003891412867233157,\n            -0.020012980327010155,\n            0.023178432136774063,\n            -0.029195405542850494,\n            -0.015133998356759548,\n            -0.0020078381057828665,\n            0.008659210987389088,\n            -0.003518621902912855,\n            -0.0023005118127912283,\n            -0.01497703418135643,\n            -0.0013758924324065447,\n            0.012203994207084179,\n            -0.011903145350515842,\n            -0.03272710740566254,\n            -0.008548027835786343,\n            -0.019411282613873482,\n            -0.02366240695118904,\n            0.012203994207084179,\n            0.00033273216104134917,\n            -0.0002947172906715423,\n            0.036782026290893555,\n            -0.015709536150097847,\n            -0.0035938341170549393,\n            0.02656625024974346,\n            0.005428357515484095,\n            0.013616674579679966,\n            -0.019372042268514633,\n            0.006272041704505682,\n            0.00254249875433743,\n            0.0007427202654071152,\n            -0.020863205194473267,\n            -0.0020307288505136967,\n            0.0022122191730886698,\n            -0.03843015432357788,\n            0.007279231213033199,\n            0.020444633439183235,\n            -0.006049675401300192,\n            0.004375386983156204,\n            -0.009973789565265179,\n            -0.010562405921518803,\n            -0.001448652008548379,\n            -0.015356365591287613,\n            0.01672980561852455,\n            0.00592214148491621,\n            -0.032099250704050064,\n            -0.02524513192474842,\n            -0.007913629524409771,\n            0.002585009904578328,\n            0.006294932682067156,\n            0.0018868447514250875,\n            -0.01812940649688244,\n            -0.015395605936646461,\n            0.009672940708696842,\n            -0.02328307554125786,\n            -0.011634997092187405,\n            0.03714827820658684,\n            
0.012570244260132313,\n            0.007233449723571539,\n            0.0009973789565265179,\n            -0.001406140741892159,\n            -0.014388416893780231,\n            -0.009705641306936741,\n            0.027128705754876137,\n            0.02595147304236889,\n            -0.004453869070857763,\n            -0.0022514602169394493,\n            -0.0513797290623188,\n            0.021321017295122147,\n            -0.008345281705260277,\n            -0.020457712933421135,\n            -0.003809660440310836,\n            -0.0010047366376966238,\n            0.020418472588062286,\n            -0.02415946125984192,\n            -0.0033518471755087376,\n            0.006579430773854256,\n            -0.04141248017549515,\n            0.011255666613578796,\n            -0.010425061918795109,\n            -0.006841038353741169,\n            -0.012949575670063496,\n            0.001469907583668828,\n            0.028253618627786636,\n            0.0015091487439349294,\n            -0.01027463749051094,\n            -0.002521243179216981,\n            -0.0012614390579983592,\n            0.0008445020066574216,\n            0.030058711767196655,\n            -0.0006335808429867029,\n            -0.01385212130844593,\n            0.00882271584123373,\n            -0.0256898645311594,\n            0.02640928514301777,\n            -0.008227558806538582,\n            0.0017004492692649364,\n            0.01164153777062893,\n            -0.00017873111937660724,\n            -0.015591812320053577,\n            0.007233449723571539,\n            -0.026710133999586105,\n            -0.012125511653721333,\n            0.006242610979825258,\n            0.028802994638681412,\n            -0.003665776224806905,\n            0.0559317022562027,\n            -0.020483873784542084,\n            -0.02412021905183792,\n            0.020261507481336594,\n            -0.013812880031764507,\n            -0.026474688202142715,\n            -0.0051405890844762325,\n            
-0.010438142344355583,\n            -0.00013611769827548414,\n            0.01206010952591896,\n            0.006435546558350325,\n            0.014519221149384975,\n            0.03037264011800289,\n            -0.03427059203386307,\n            -0.024316424503922462,\n            -0.0001540010271128267,\n            0.009202047251164913,\n            0.017815476283431053,\n            -0.008744233287870884,\n            0.002892398973926902,\n            0.0256898645311594,\n            0.0469585619866848,\n            -0.010248477570712566,\n            0.005212530959397554,\n            -0.010065351612865925,\n            -0.0018590488471090794,\n            -0.013394308276474476,\n            -0.029797103255987167,\n            0.013263504952192307,\n            -0.008927359245717525,\n            -0.013642835430800915,\n            -0.005971193313598633,\n            -0.005438167601823807,\n            0.015997303649783134,\n            0.002414965070784092,\n            0.03599720448255539,\n            0.01852181740105152,\n            -0.004771068226546049,\n            0.022262806072831154,\n            -0.004682775586843491,\n            0.009561757557094097,\n            0.022916823625564575,\n            -0.007926709949970245,\n            0.00502286572009325,\n            -0.0013955129543319345,\n            0.029143085703253746,\n            0.01484623085707426,\n            -0.016154268756508827,\n            0.004708936437964439,\n            -0.009574837982654572,\n            0.003672316437587142,\n            -0.0075800796039402485,\n            0.021373338997364044,\n            -0.013106539845466614,\n            0.009156265296041965,\n            0.014479979872703552,\n            -0.015160159207880497,\n            -0.038116224110126495,\n            0.012321717105805874,\n            0.008770394138991833,\n            0.014022166840732098,\n            0.02932620979845524,\n            -0.011805042624473572,\n            
0.01126874703913927,\n            -0.026723215356469154,\n            -0.011438791640102863,\n            -0.003992785699665546,\n            -0.024878881871700287,\n            -0.003567673498764634,\n            -0.0032553793862462044,\n            0.002880953485146165,\n            0.02859370969235897,\n            0.021268697455525398,\n            0.02275986038148403,\n            0.013982925564050674,\n            -0.01858721859753132,\n            0.026657812297344208,\n            -0.01485931035131216,\n            -0.008515327237546444,\n            -0.03887488692998886,\n            0.007665102370083332,\n            0.0019506115932017565,\n            0.005748826544731855,\n            0.028750672936439514,\n            -0.02125561609864235,\n            0.018456414341926575,\n            0.005987543612718582,\n            0.030084872618317604,\n            -0.011654618196189404,\n            0.00436230655759573,\n            0.010601647198200226,\n            -0.014048327691853046,\n            -0.0011944021098315716,\n            -0.026265401393175125,\n            -0.0008216113201342523,\n            -0.0036069145426154137,\n            -0.02414637990295887,\n            0.0029463553801178932,\n            -0.0019816774874925613,\n            -0.01186390407383442,\n            0.05901867151260376,\n            0.02950933575630188,\n            0.008633050136268139,\n            0.027599601075053215,\n            0.018443334847688675,\n            -0.005412006750702858,\n            0.0055330004543066025,\n            0.00016442444757558405,\n            -0.015984224155545235,\n            0.006406115833669901,\n            0.024224862456321716,\n            -0.019555166363716125,\n            0.02077164314687252,\n            -0.003317511174827814,\n            -0.0008420494268648326,\n            0.016939090564846992,\n            -0.023649325594305992,\n            0.00926744844764471,\n            0.026029955595731735,\n            
-0.02639620564877987,\n            0.009444033727049828,\n            0.0167559664696455,\n            0.020967848598957062,\n            0.00742965517565608,\n            -0.040810782462358475,\n            -0.014401497319340706,\n            0.030529605224728584,\n            -0.014218372292816639,\n            -0.022890662774443626,\n            -0.04533659294247627,\n            0.01410064846277237,\n            0.0030133924447000027,\n            -0.03670354560017586,\n            -0.035212382674217224,\n            0.0008608524804003537,\n            0.010130753740668297,\n            0.007488517090678215,\n            -0.02770424261689186,\n            0.011981626972556114,\n            0.005774987395852804,\n            0.01628507301211357,\n            0.03427059203386307,\n            -0.02752111852169037,\n            -0.033407289534807205,\n            -0.016612080857157707,\n            0.01129490789026022,\n            -0.023099949583411217,\n            -0.03529086336493492,\n            -0.02149106375873089\n          ]\n        },\n        \"extra_info\": null,\n        \"id_map\": {\n          \"3e6bad2c-8631-4352-8c7b-77c2f207b8f0\": 7804824146984106564\n        },\n        \"nodes_dict\": {\n          \"7804824146984106564\": {\n            \"child_indices\": [],\n            \"doc_id\": \"6ef465ed-ca0c-4e6b-85e9-7d02b03bf63b\",\n            \"embedding\": null,\n            \"extra_info\": null,\n            \"index\": 0,\n            \"node_info\": {\n              \"end\": 5753,\n              \"start\": 0\n            },\n            \"ref_doc_id\": \"04983d90-d213-481b-a496-e83882124d36\",\n            \"text\": \"11ARTIFICIAL \\nINTELLIGENCE\\nIntroduction\\n2Outline\\n\\uf06cWhat is AI?\\n\\uf06cA brief history\\n\\uf06cWhat can AI do?\\n23What is Intelligence\\n4What is Intelligence\\n\\uf06cA wish-list of general characteristics of intelligence\\n\\uf06cPerception : manipulation, inter pretation of data provided by 
sensors\\n\\uf06cAction : control, and use of effectors  to accomplish a variety of task s \\n\\uf06cReasoning : deductive (logical) infer ence, inductive inference, \\n\\uf06cLearning : adapting behavior to better c ope with changing environments, \\ndiscovery of patterns, learning to reason, plan, and act.\\n\\uf06cCommunication : with other intelligent agents including humans using \\nsignals, signs, icons, \\u2026\\n\\uf06cPlanning : formulation of plans -- sequences or agenda of actions to \\naccomplish externally or internally determined goals\\n\\uf06c\\u2026\\n35What is AI?\\n\\uf06cThe exciting new effort to make computers think .. machines with \\nminds\\n\\uf06cAI is the art of creating machi nes that perform functions that \\nrequire intelligence w hen performed by humans\\n\\uf06cAI is the study of the computati ons that make it possible to \\nperceive, reason, and act\\n\\uf06cAI is the enterprise of desi gn and analysis of intelligent agen ts.\\n6Acting humanly: Turing Test\\n\\uf06cAlan Turing (1950) \\\"Computing machinery and intelligence\\\":\\n\\uf06c\\\"Can machines think?\\\" \\uf0e0\\\"Can machines behave intelligently?\\\"\\n\\uf06cOperational test for intelligent behavior: the Imitation Game\\n\\uf06cPredicted that by 2000, a machine might have a 30% chance of fo oling \\na lay person for 5 minutes\\n\\uf06cAnnual Loebner prize competition (since 1990)\\n\\uf06cHow good are current chatbots?\\n\\n47What is AI?\\nThinking humanly Thinking rationally \\nActing humanly Acting rationally \\n\\uf06cAre you concerned with thought processes/reasoning or behavior ?\\n\\uf06cDo you want to model humans or measure against an ideal concept of \\nintelligence, rationality\\n8Acting rationally: rational \\nagent\\n\\uf06cThis course is about designing rational agents\\n\\uf06cAdvocated by the textbook \\n\\uf06cRational behavior: doing the right thing\\n\\uf06cThe right thing: that which i s expected to maximize goal \\nachievement, given the available 
information\\n\\uf06cAn agent is an entity that perceives and acts\\n\\uf06cA rational agent is one that acts so as to achieve the best outcome\\n59Brief history of AI\\n\\uf06c1943     McCulloch & Pitts: model of artificial neurons \\n\\uf06c1950     Turing's \\\"Computing Machinery and Intelligence\\u201c\\n\\uf06c1956 McCarthy, Minsky, Newell, Simon, Turing, Uhr, et al., \\nDartmouth workshop: \\\"Artificial Intelligence\\\" adopted\\n\\uf06c1952\\u201469 Early enthusiasm, great expectations, optimism fueled \\nby early success on some problems thought to be hard (e.g., theorem proving) \\n\\uf06c1966\\u201473 Collapse in AI research: Progress was slower than \\nexpected, Unrealistic predictions, Herbert Simon (1957) chess champion in 10 years    AI discovers computational complexity\\n10Brief history of AI\\n\\uf06c1969\\u201479 Early development o f knowledge-based systems\\n\\uf06c1980--- AI becomes an industr y: Expert systems industry \\nbooms, then busts (88- 93): \\u201cAI Winter\\u201c\\n\\uf06c1986-- Neural networks return to popularity\\n\\uf06c1987-- AI becomes a science: rev olution in t he content and \\nmethodology of work in AI, great advance, active research field since\\n611Brief History of AI\\n\\uf06cMid 1990s-present :  The emer gence of intelligent agents \\n\\uf06cAI technologies continue to find applications in \\n\\uf06cinformation retrieval, \\n\\uf06cdata mining and knowledge discovery, \\n\\uf06ccustomizable software systems, \\n\\uf06csmart devices (e.g., homes, automobiles), \\n\\uf06cagile manufacturing systems, \\n\\uf06cautonomous vehicles, \\n\\uf06cBioinformatics\\n\\uf06cInternet tools: search engines, recommender systems\\n\\uf06cetc. 
\\n\\uf06cSteady progress on fundamental AI research problems \\ncontinues.\\n12Brief History of AI\\n\\uf06cSince 2006:  the emergence of deep learning\\n\\uf06cSuccessful large-scale real-world  applications (2011-present) i n \\n\\uf06cimage recognition\\n\\uf06cnatural language processing\\n\\uf06cspeech recognition\\n\\uf06cmachine translation\\n\\uf06c\\u2026\\n7Strong AI\\n\\uf06cNarrow/Weak AI\\n\\uf06cArtificial General Intelligence\\n\\uf06cSingularity: Ray Kurzweil, \\u201cThe singularity is near \\n(2005)\\u201d\\n\\uf06cTranscendence (2014) \\n\\uf06cEx Machina (2015): \\u201cOne day the AIs are going to look back on \\nus the same way we look at fossil skeletons on the plains of Africa. An upright ape living in dust with crude language and tools, all set for extinction.\\u201d \\n13\\n14What can AI do \\n\\uf06cDeep Blue defeated the reigning wo rld chess champion Garry Kasp arov in \\n1997 \\n\\uf06cChinook defeated human checkers c hampion in 1994, can't lose at  \\ncheckers 2007\\n\\uf06cThe IBM supercomputer Watson beat human champions on 'Jeopardy! \\u2018 \\n(2011)\\n\\uf06cAlphaGo beat a top Go player in  2016. 
AlphaGo Zer o stronger than any \\nhuman player (2017)\\n\\uf06cCMU's poker computer defeated the world\\u2019s best Texas Hold \\u2018em po ker \\nplayers head-to-head 2017, six-player July 2019\\n\\uf06cAI defeated human pros a t StarCraft II, Dota (2019)\\n815What can AI do \\n\\uf06cProved a mathematical conjecture (Robbins conjecture, 1996) uns olved \\nfor 60 years \\n\\uf06cDuring the 1991 Gulf War, US forces deployed an AI logistics pl anning \\nand scheduling program that involved up to 50,000 vehicles, car go, and \\npeople, save the US more money than spent on all AI research si nce \\n1950\\n\\uf06cNASA's on-board autonomous planning program controlled the scheduling of operations for a spacecraft (2000), Mars Explorat ion \\nRovers (2004).\\n16What can AI do \\n\\uf06cDriverless cars\\n\\uf06cNo hands across America (driving autonomously 98% of the time f rom Pittsburgh \\nto San Diego, 1995)\\n\\uf06cDARPA Grand Challenge prize competition for driverless cars (20 07)\\n\\uf06cDriverless vans completed 8000-mile trip from Italy to China (J uly-Oct 2010)\\n\\uf06cGoogle\\u2019s driverless cars, l egal in Nevada and California\\n\\uf06cGreat progress in image recognition, speech recognition, machin e \\ntranslation: Deep Learning\\n\\uf06cRobotic scientists make scientific  discoveries by itself (Scien ce 2009): \\nformulation of hypotheses and des igning of experiments to test them \\nAITopics site: https://aitopics.org/\"\n          }\n        },\n        \"text\": null\n      }\n    }\n  },\n  \"index_struct_id\": \"eb0ac69b-4d62-46ab-9c9e-7d40136c9c0d\",\n  \"vector_store\": {\n    \"simple_vector_store_data_dict\": {\n      \"embedding_dict\": {\n        \"3e6bad2c-8631-4352-8c7b-77c2f207b8f0\": [\n          -0.005091537721455097,\n          -0.0019130054861307144,\n          0.006239341106265783,\n          -0.012197453528642654,\n          -0.006677533499896526,\n          0.0006552452687174082,\n          -0.012413280084729195,\n          
0.019502844661474228,\n          -0.007652021944522858,\n          -0.015290963463485241,\n          -0.0034041686449199915,\n          0.03727908059954643,\n          -0.025205891579389572,\n          0.011589216068387032,\n          0.02396325394511223,\n          -0.0008306040544994175,\n          0.027835046872496605,\n          0.012354418635368347,\n          0.017501547932624817,\n          -0.007272691000252962,\n          -0.03283175081014633,\n          0.011896604672074318,\n          -0.003453220007941127,\n          -0.014152970165014267,\n          -0.012197453528642654,\n          -0.005945032462477684,\n          0.032046929001808167,\n          -0.028227457776665688,\n          0.00804443284869194,\n          -0.010725910775363445,\n          0.011236045509576797,\n          -0.022498251870274544,\n          -0.03063424862921238,\n          -0.01899271085858345,\n          -0.014597702771425247,\n          0.009470194578170776,\n          0.002962705912068486,\n          -0.03149755299091339,\n          -0.0005632738466374576,\n          -0.013034597970545292,\n          0.03662506118416786,\n          0.0313405878841877,\n          -0.010359660722315311,\n          -0.009005840867757797,\n          -0.0122759360820055,\n          0.0357094369828701,\n          -0.013812880031764507,\n          -0.018940389156341553,\n          0.010843634605407715,\n          0.001841063378378749,\n          0.007625861093401909,\n          0.04429016262292862,\n          -0.004767798352986574,\n          0.0017266100039705634,\n          -0.018835745751857758,\n          -0.005258312448859215,\n          -0.016455117613077164,\n          0.01672980561852455,\n          0.0358140766620636,\n          -0.017553869634866714,\n          0.008548027835786343,\n          -0.006245880853384733,\n          -0.02971862070262432,\n          0.001044795266352594,\n          -0.02232820726931095,\n          -0.0036919370759278536,\n          -0.0034270593896508217,\n         
 0.03084353357553482,\n          -0.008626510389149189,\n          -0.013420469127595425,\n          0.02389785274863243,\n          0.022929904982447624,\n          0.010653968900442123,\n          0.012007787823677063,\n          0.02389785274863243,\n          0.014780828729271889,\n          -0.004856090992689133,\n          0.0006184566882438958,\n          -0.013865201734006405,\n          0.002962705912068486,\n          0.017763154581189156,\n          0.0027337991632521152,\n          0.00220731389708817,\n          0.011942386627197266,\n          0.016625162214040756,\n          0.01924123801290989,\n          0.009502895176410675,\n          0.01768467202782631,\n          -0.012805690988898277,\n          -0.008011732250452042,\n          -0.00016840988246258348,\n          0.0018018222181126475,\n          0.00929360929876566,\n          0.025166649371385574,\n          -0.009273989126086235,\n          0.03557863086462021,\n          -0.0014502870617434382,\n          0.0022857962176203728,\n          0.0006854936364106834,\n          -0.043740786612033844,\n          0.011419171467423439,\n          -0.01407448761165142,\n          -0.019568247720599174,\n          -0.013551272451877594,\n          0.004620643798261881,\n          -0.005013055168092251,\n          -0.009424413554370403,\n          0.012897253967821598,\n          0.03929346054792404,\n          0.011203344911336899,\n          0.00759970024228096,\n          0.028724512085318565,\n          0.02104632928967476,\n          -0.03293639421463013,\n          -0.00017658511933404952,\n          -0.006196829490363598,\n          0.021608786657452583,\n          -0.027651920914649963,\n          -0.03042496182024479,\n          -0.013204642571508884,\n          0.00806405395269394,\n          0.026435445994138718,\n          0.02279910072684288,\n          -0.017043733969330788,\n          0.021648027002811432,\n          -0.003072254126891494,\n          -0.0076454817317426205,\n          
-0.01612810790538788,\n          -0.013335446827113628,\n          -0.0234923604875803,\n          -0.0013979654759168625,\n          -0.013106539845466614,\n          0.020941687747836113,\n          0.0335380919277668,\n          -0.02506200596690178,\n          -0.0054316273890435696,\n          -0.03769765421748161,\n          0.023296155035495758,\n          0.002382264006882906,\n          -0.016926011070609093,\n          0.017370743677020073,\n          0.012792610563337803,\n          -0.0019669621251523495,\n          -0.011556515470147133,\n          -0.01497703418135643,\n          0.03510773926973343,\n          0.004077808000147343,\n          0.004800498951226473,\n          -0.003695207182317972,\n          0.02885531634092331,\n          0.004937842953950167,\n          -0.0008829255821183324,\n          -0.019790614023804665,\n          0.013374688103795052,\n          -0.00916934572160244,\n          -0.0013243884313851595,\n          -0.006952221505343914,\n          -0.009437493979930878,\n          0.01029425859451294,\n          -0.0007778737926855683,\n          0.008639590814709663,\n          0.010222316719591618,\n          -0.022655216977000237,\n          0.023139191791415215,\n          0.0025833749677985907,\n          0.015304043889045715,\n          -0.013930603861808777,\n          0.0002509797632228583,\n          0.00346957053989172,\n          -0.02210584096610546,\n          -0.007717423606663942,\n          0.017593109980225563,\n          -0.0024820019025355577,\n          0.006697154138237238,\n          0.003613454755395651,\n          0.01466310489922762,\n          0.029640140011906624,\n          0.007979031652212143,\n          -0.032282374799251556,\n          0.006010434124618769,\n          0.008548027835786343,\n          -0.02035306952893734,\n          0.01567029394209385,\n          0.006353794131428003,\n          0.008410683833062649,\n          0.0019489765400066972,\n          0.020889366045594215,\n          
-0.004826659802347422,\n          0.02031382918357849,\n          -0.02661857195198536,\n          0.008345281705260277,\n          0.0036003743298351765,\n          0.003973165061324835,\n          -0.0033681977074593306,\n          -0.6303696632385254,\n          -0.01463694404810667,\n          0.006141237914562225,\n          -0.012812231667339802,\n          0.00803789310157299,\n          0.02591223083436489,\n          0.010065351612865925,\n          -0.020052220672369003,\n          0.001958786742761731,\n          0.025389015674591064,\n          -0.0016146092675626278,\n          0.013034597970545292,\n          0.007697802968323231,\n          -0.011504193767905235,\n          -0.012903793714940548,\n          -0.017972441390156746,\n          -0.0015917186392471194,\n          -0.03597104176878929,\n          -0.0003517395816743374,\n          0.005202720873057842,\n          -0.001308855484239757,\n          0.03246550261974335,\n          -0.03149755299091339,\n          -0.01829945109784603,\n          -0.01899271085858345,\n          -0.008750773966312408,\n          0.008207937702536583,\n          0.007841687649488449,\n          0.005601672455668449,\n          0.02143874205648899,\n          -0.03604952618479729,\n          0.024682676419615746,\n          0.010333499871194363,\n          0.009254368022084236,\n          0.0424850732088089,\n          -0.014558462426066399,\n          -0.015447927638888359,\n          0.0470108836889267,\n          0.006373414769768715,\n          0.03223005309700966,\n          -0.006275312043726444,\n          -0.006340713705867529,\n          0.00951597560197115,\n          -0.0014314840082079172,\n          0.005748826544731855,\n          -0.004587943200021982,\n          -5.042460543336347e-06,\n          0.007266150787472725,\n          -0.0019064652733504772,\n          -0.025088166818022728,\n          0.002722353907302022,\n          0.010451222769916058,\n          -0.014519221149384975,\n          
-0.0026356964372098446,\n          0.0026209810748696327,\n          -0.004077808000147343,\n          0.027416475117206573,\n          -0.006680803839117289,\n          0.01340738870203495,\n          -0.01921507716178894,\n          0.006801797077059746,\n          0.023636246100068092,\n          -0.02773040346801281,\n          -0.010928656905889511,\n          -0.03105282038450241,\n          0.038534797728061676,\n          -0.016599001362919807,\n          -0.013433549553155899,\n          -0.022092759609222412,\n          -0.02545441873371601,\n          0.011445331387221813,\n          0.024957364425063133,\n          -0.010091512463986874,\n          -0.012799151241779327,\n          0.010222316719591618,\n          0.00318507244810462,\n          0.02992790751159191,\n          0.012792610563337803,\n          -0.01811632513999939,\n          0.0007950417930260301,\n          -0.0009834810625761747,\n          -0.031131302937865257,\n          0.004643534775823355,\n          -0.04196185618638992,\n          0.014009086415171623,\n          -0.009234747849404812,\n          0.009522516280412674,\n          -0.0036101846490055323,\n          -0.012753370217978954,\n          0.0083845229819417,\n          0.030895855277776718,\n          0.029875585809350014,\n          -0.025755267590284348,\n          -0.0469847209751606,\n          -0.013485871255397797,\n          0.015225561335682869,\n          -0.00115761358756572,\n          -0.020261507481336594,\n          -0.008469545282423496,\n          -0.004502920433878899,\n          0.024499550461769104,\n          -0.014270693995058537,\n          0.020496955141425133,\n          0.04363614693284035,\n          0.003973165061324835,\n          0.015696454793214798,\n          0.012001248076558113,\n          0.00693914107978344,\n          0.022942984476685524,\n          -0.026016874238848686,\n          -0.01608886569738388,\n          -0.011242586188018322,\n          0.00016217626398429275,\n         
 0.0380639024078846,\n          0.010601647198200226,\n          -0.00357748381793499,\n          0.008456464856863022,\n          0.001831253059208393,\n          0.021150972694158554,\n          -0.019097354263067245,\n          0.019097354263067245,\n          -0.01039890106767416,\n          0.021530304104089737,\n          -0.024041736498475075,\n          -0.002956165699288249,\n          0.0016121567459776998,\n          -0.01699141226708889,\n          -0.016428956761956215,\n          -0.018940389156341553,\n          -0.017279181629419327,\n          0.0083845229819417,\n          -0.03262246400117874,\n          -0.01161537691950798,\n          -0.0006597416358999908,\n          0.023335397243499756,\n          -0.0067167747765779495,\n          0.0077239638194441795,\n          -0.0028956688474863768,\n          -0.012105890549719334,\n          0.00357748381793499,\n          -0.015238641761243343,\n          0.0025931852869689465,\n          0.025768347084522247,\n          -0.0013595419004559517,\n          -0.01785471849143505,\n          -0.015055516734719276,\n          -0.004372116643935442,\n          -0.013839040882885456,\n          -0.02281218208372593,\n          -0.0038194707594811916,\n          -0.02103324979543686,\n          0.00882271584123373,\n          0.0006290844758041203,\n          0.027154866605997086,\n          0.005376035813242197,\n          -0.024918122217059135,\n          -0.018508736044168472,\n          -0.025872990489006042,\n          -0.0004995070048607886,\n          -0.02640928514301777,\n          -0.02321767248213291,\n          0.022498251870274544,\n          -0.0031392909586429596,\n          0.01183120347559452,\n          0.0006691431626677513,\n          -0.004846280440688133,\n          0.008417223580181599,\n          0.033668894320726395,\n          -0.05451902002096176,\n          -0.05723974108695984,\n          0.006621941924095154,\n          -0.0009246193221770227,\n          
0.020444633439183235,\n          0.026200000196695328,\n          -0.01589266024529934,\n          0.013152320869266987,\n          -0.011582675389945507,\n          -0.009215126745402813,\n          0.0058567398227751255,\n          -0.01553949061781168,\n          -0.013891362585127354,\n          0.014244533143937588,\n          0.010503544472157955,\n          -0.008894657716155052,\n          0.018377933651208878,\n          0.00237735896371305,\n          0.024277184158563614,\n          0.01039236132055521,\n          -0.02171343006193638,\n          0.006020244676619768,\n          0.012118970975279808,\n          0.03934578225016594,\n          0.018731102347373962,\n          -0.0018377932719886303,\n          -0.029143085703253746,\n          0.041883375495672226,\n          -0.013041137717664242,\n          -0.0016309597995132208,\n          0.001916275592520833,\n          0.032046929001808167,\n          0.021517224609851837,\n          0.008541488088667393,\n          -0.0007067492697387934,\n          -0.004715476650744677,\n          0.014453819021582603,\n          -0.013034597970545292,\n          0.014597702771425247,\n          -0.02060159668326378,\n          0.027442635968327522,\n          -0.011563055217266083,\n          0.02079780213534832,\n          -0.004774338565766811,\n          0.009659860283136368,\n          -0.024643434211611748,\n          -0.000886195688508451,\n          0.041883375495672226,\n          -0.014087568037211895,\n          0.01809016428887844,\n          -0.05572241544723511,\n          -0.004780878778547049,\n          0.02841058373451233,\n          0.031131302937865257,\n          -0.003528432222083211,\n          0.006602321285754442,\n          -0.006723314989358187,\n          0.028933798894286156,\n          0.021412581205368042,\n          0.025323614478111267,\n          -0.023126110434532166,\n          -0.04444712772965431,\n          -0.007972490973770618,\n          0.008757313713431358,\n          
-0.004502920433878899,\n          0.009633699432015419,\n          0.030948176980018616,\n          0.026108436286449432,\n          0.00458467286080122,\n          -0.019149675965309143,\n          0.02749495767056942,\n          0.017828557640314102,\n          -0.015565651468932629,\n          -0.012341338209807873,\n          0.028462905436754227,\n          -0.04656615108251572,\n          0.016690563410520554,\n          0.01484623085707426,\n          0.036101847887039185,\n          0.016389714553952217,\n          -0.0167690459638834,\n          0.030346479266881943,\n          -0.014479979872703552,\n          -0.021634947508573532,\n          -0.0313405878841877,\n          0.007102645933628082,\n          -0.015173239633440971,\n          -0.015526410192251205,\n          0.02503584511578083,\n          -0.008646130561828613,\n          -0.006255691405385733,\n          0.03330264613032341,\n          0.016625162214040756,\n          -0.013865201734006405,\n          0.029169246554374695,\n          -0.008018272928893566,\n          -0.0024067896883934736,\n          -0.010876335203647614,\n          0.012249775230884552,\n          -0.005081727169454098,\n          -0.0070764850825071335,\n          0.007344632875174284,\n          0.00012038036220474169,\n          -0.002060159808024764,\n          0.015212480910122395,\n          -0.0031916124280542135,\n          0.036311130970716476,\n          0.013270044699311256,\n          0.005634373519569635,\n          -0.0005988361081108451,\n          0.03063424862921238,\n          0.02194887585937977,\n          -0.027992011979222298,\n          -0.043243732303380966,\n          -0.0010627808514982462,\n          0.025375936180353165,\n          0.00938517227768898,\n          1.3987319107400253e-06,\n          -0.019058112055063248,\n          0.0034139789640903473,\n          -0.009757963009178638,\n          -0.02037923038005829,\n          -0.010621268302202225,\n          0.018495656549930573,\n     
     -0.007959410548210144,\n          0.012995356693863869,\n          -0.01030079834163189,\n          -0.010889415629208088,\n          0.047978829592466354,\n          -0.0028106465470045805,\n          0.013956764712929726,\n          -0.014453819021582603,\n          0.021988118067383766,\n          -0.004803769290447235,\n          -0.008979680016636848,\n          -0.014571542851626873,\n          0.0256898645311594,\n          -0.010104592889547348,\n          -0.008534947410225868,\n          0.011020219884812832,\n          0.0076127806678414345,\n          -0.029640140011906624,\n          0.008626510389149189,\n          0.004012406338006258,\n          0.01130144763737917,\n          0.010791312903165817,\n          0.016428956761956215,\n          -0.00563764339312911,\n          0.0012303731637075543,\n          -0.01107908133417368,\n          0.01610194705426693,\n          -0.009928007610142231,\n          -0.00870499201118946,\n          -0.03379970043897629,\n          0.013786720111966133,\n          0.02749495767056942,\n          0.060902245342731476,\n          0.007508137729018927,\n          -0.022419769316911697,\n          0.021072490140795708,\n          -0.023060709238052368,\n          -0.017815476283431053,\n          -0.01309345941990614,\n          -0.016023464500904083,\n          0.023361558094620705,\n          0.020627757534384727,\n          -0.0058600096963346004,\n          0.013551272451877594,\n          0.023060709238052368,\n          -0.010281178168952465,\n          0.02791352942585945,\n          0.002557214116677642,\n          0.01230209693312645,\n          -0.0234792809933424,\n          0.01406140811741352,\n          -0.005346605088561773,\n          0.011471492238342762,\n          0.012956115417182446,\n          0.006124887615442276,\n          0.020052220672369003,\n          0.009797204285860062,\n          -0.033224161714315414,\n          0.021844232454895973,\n          0.008796554990112782,\n          
-0.023204592987895012,\n          -0.01807708479464054,\n          0.005650723818689585,\n          0.01674288511276245,\n          -0.001294957590289414,\n          -0.022707538679242134,\n          -0.0009090863750316203,\n          0.024080978706479073,\n          0.01746230572462082,\n          0.02439490705728531,\n          -0.0026209810748696327,\n          -0.012001248076558113,\n          0.01785471849143505,\n          0.030006390064954758,\n          -0.006075836252421141,\n          -0.024460308253765106,\n          -0.0060692960396409035,\n          -0.02253749407827854,\n          0.031000498682260513,\n          0.04149096459150314,\n          -0.02992790751159191,\n          -0.023361558094620705,\n          0.010209236294031143,\n          0.012884173542261124,\n          -0.02060159668326378,\n          0.0075800796039402485,\n          -0.00972526241093874,\n          0.008469545282423496,\n          0.008678831160068512,\n          -0.0009417873225174844,\n          -0.002908749273046851,\n          -0.012779530137777328,\n          0.006242610979825258,\n          -0.0016759235877543688,\n          0.02415946125984192,\n          -0.0015933536924421787,\n          -0.011752720922231674,\n          -0.026644732803106308,\n          0.005441437941044569,\n          0.01695217192173004,\n          -0.009332850575447083,\n          0.01584033854305744,\n          0.0035545930732041597,\n          -0.01497703418135643,\n          -0.03827318921685219,\n          -0.021818073466420174,\n          0.014702346175909042,\n          0.013368147425353527,\n          0.020614678040146828,\n          -0.019542086869478226,\n          -0.015931902453303337,\n          0.010529705323278904,\n          0.008031352423131466,\n          -0.05797224119305611,\n          0.001975137274712324,\n          -0.03288407251238823,\n          0.01632431335747242,\n          -0.00453235162422061,\n          0.01331582572311163,\n          -0.0075800796039402485,\n         
 -0.0029447204433381557,\n          -0.010915576480329037,\n          -0.001937531167641282,\n          -0.0062491511926054955,\n          0.021281776949763298,\n          0.012047029100358486,\n          -0.0012540813768282533,\n          0.014401497319340706,\n          -0.011824662797152996,\n          0.013021517544984818,\n          0.0030640787445008755,\n          -0.040366049855947495,\n          -0.005601672455668449,\n          -0.0069326008670032024,\n          -0.018024763092398643,\n          -0.017815476283431053,\n          0.026304641738533974,\n          0.0025441336911171675,\n          0.005794608034193516,\n          0.0008714802679605782,\n          -0.02415946125984192,\n          -0.014257613569498062,\n          0.019175836816430092,\n          -0.0025604842230677605,\n          0.04742945358157158,\n          -0.010281178168952465,\n          -0.004365576431155205,\n          0.02529745362699032,\n          0.013564352877438068,\n          0.005065376870334148,\n          0.033616576343774796,\n          -0.012249775230884552,\n          -0.001862318953499198,\n          -0.025598302483558655,\n          0.007468896452337503,\n          -0.00023830815916880965,\n          -0.014270693995058537,\n          -0.007508137729018927,\n          0.005111158359795809,\n          -0.0071091861464083195,\n          -0.01629815250635147,\n          0.0007488516857847571,\n          -0.00592214148491621,\n          0.007606240455061197,\n          -0.007521218154579401,\n          -0.010039190761744976,\n          0.008443384431302547,\n          -0.009175886400043964,\n          -0.010084972716867924,\n          0.020169945433735847,\n          -0.021608786657452583,\n          0.0010251747444272041,\n          -0.010673589073121548,\n          -0.011700399219989777,\n          0.0022351099178195,\n          0.00869845226407051,\n          0.021137893199920654,\n          -0.03620649129152298,\n          9.289113222621381e-05,\n          
0.024852721020579338,\n          0.005689965095371008,\n          0.03780229762196541,\n          0.00037933100247755647,\n          0.018024763092398643,\n          -0.02194887585937977,\n          -0.018024763092398643,\n          -0.005536270327866077,\n          -0.034087467938661575,\n          -0.01164153777062893,\n          -0.0189665500074625,\n          0.03272710740566254,\n          0.01387828215956688,\n          0.0012704317923635244,\n          -0.024734996259212494,\n          0.017580030485987663,\n          0.01196854654699564,\n          0.005117698572576046,\n          -0.017933199182152748,\n          0.023335397243499756,\n          0.0024280454963445663,\n          -0.015212480910122395,\n          -0.005941762123256922,\n          0.02549365907907486,\n          0.014793909154832363,\n          0.004836470354348421,\n          -0.0011755990562960505,\n          0.0010423427447676659,\n          0.014741587452590466,\n          -0.007593160029500723,\n          -0.014296854846179485,\n          -0.03730524331331253,\n          -0.02812281623482704,\n          -0.0015271343290805817,\n          0.027207188308238983,\n          0.002503257477656007,\n          0.00681487750262022,\n          -0.040339890867471695,\n          0.008750773966312408,\n          0.024643434211611748,\n          0.007880928926169872,\n          0.009777583181858063,\n          0.0011061095865443349,\n          0.021281776949763298,\n          0.021634947508573532,\n          0.002032364020124078,\n          0.023753968998789787,\n          0.0016685659065842628,\n          -0.0028041063342243433,\n          -0.006795256864279509,\n          -0.025768347084522247,\n          0.020065302029252052,\n          0.00926090870052576,\n          -0.006867199204862118,\n          0.0011396280024200678,\n          0.02725951001048088,\n          -0.00413012970238924,\n          0.007566999178379774,\n          -0.0041006989777088165,\n          0.006350524257868528,\n          
0.005359685514122248,\n          0.007671642582863569,\n          -0.012903793714940548,\n          -0.0033780080266296864,\n          -0.029169246554374695,\n          -0.006350524257868528,\n          -0.01853489689528942,\n          0.014597702771425247,\n          0.009444033727049828,\n          0.013008437119424343,\n          0.03249166160821915,\n          -0.021595705300569534,\n          -0.0446564145386219,\n          0.03740988299250603,\n          -0.015461008064448833,\n          0.03304103761911392,\n          0.002691288013011217,\n          0.06880279630422592,\n          0.01632431335747242,\n          0.019372042268514633,\n          -0.0050294059328734875,\n          0.009836445562541485,\n          0.007351173087954521,\n          0.02391093410551548,\n          0.028907638043165207,\n          0.005474138539284468,\n          0.010843634605407715,\n          -0.011020219884812832,\n          -0.0035251621156930923,\n          0.005503569729626179,\n          -0.004077808000147343,\n          -0.050621066242456436,\n          0.010824013501405716,\n          0.009018921293318272,\n          0.005739016458392143,\n          -0.020300747826695442,\n          -0.01874418370425701,\n          -0.004793959204107523,\n          0.02838442288339138,\n          0.0018933848477900028,\n          -0.0049607339315116405,\n          -0.004355766344815493,\n          -0.016455117613077164,\n          -0.007364253513514996,\n          0.013943684287369251,\n          -0.007226909510791302,\n          0.021164054051041603,\n          0.006059485487639904,\n          -0.003417249070480466,\n          -0.00012293513282202184,\n          -0.01922815665602684,\n          0.004054917488247156,\n          0.006386495195329189,\n          0.002920194761827588,\n          0.010196155868470669,\n          -0.019529005512595177,\n          0.018613379448652267,\n          0.010215776041150093,\n          -0.0050751869566738605,\n          0.0013742573792114854,\n       
   -0.001970232231542468,\n          0.01612810790538788,\n          0.014453819021582603,\n          -0.020928606390953064,\n          -0.014218372292816639,\n          -0.014820070005953312,\n          -0.004888791590929031,\n          0.0009589553228579462,\n          -0.021307937800884247,\n          -0.013355066999793053,\n          0.008436844684183598,\n          -0.015107838436961174,\n          -0.0029430852737277746,\n          0.020431552082300186,\n          0.02031382918357849,\n          -0.02477423846721649,\n          -0.0011371754808351398,\n          0.00033518471173010767,\n          -0.016664402559399605,\n          0.010026110336184502,\n          -0.01095481775701046,\n          0.007638941518962383,\n          -0.0122759360820055,\n          -0.013564352877438068,\n          0.012210533954203129,\n          -0.01746230572462082,\n          -0.012511382810771465,\n          0.025663703680038452,\n          0.00316218170337379,\n          -0.001379980007186532,\n          0.04405471682548523,\n          -0.020496955141425133,\n          -0.0003053451073355973,\n          0.014702346175909042,\n          0.0257421862334013,\n          -0.02392401359975338,\n          -0.005820768885314465,\n          -0.025859909132122993,\n          -0.00742311542853713,\n          0.02503584511578083,\n          -0.02078472264111042,\n          -0.006108537316322327,\n          -0.016232751309871674,\n          -0.013668996281921864,\n          0.010810933075845242,\n          -0.02545441873371601,\n          -0.013361607678234577,\n          -0.002606265479698777,\n          0.01653360016644001,\n          -0.015696454793214798,\n          0.007089565508067608,\n          -0.025192810222506523,\n          -0.007867848500609398,\n          0.01635047420859337,\n          0.008364902809262276,\n          0.01488547120243311,\n          -0.015133998356759548,\n          0.03952890634536743,\n          -0.00541527708992362,\n          0.02684093825519085,\n       
   0.01309345941990614,\n          -0.020706240087747574,\n          -0.02410713955760002,\n          -0.01331582572311163,\n          0.004659885074943304,\n          -0.024015575647354126,\n          0.017514627426862717,\n          0.0010578756919130683,\n          0.013250424526631832,\n          -0.01565721444785595,\n          -0.00793979037553072,\n          -0.012472141534090042,\n          -0.0036069145426154137,\n          -0.01117718406021595,\n          0.012426360510289669,\n          -0.0033191463444381952,\n          0.004486570134758949,\n          -0.0004247035540174693,\n          -0.018888067454099655,\n          -0.005703045520931482,\n          -4.892368451692164e-05,\n          -0.042328108102083206,\n          0.021857313811779022,\n          0.0009360646363347769,\n          0.035421665757894516,\n          0.012812231667339802,\n          -0.0015990763204172254,\n          -0.032543983310461044,\n          -0.025859909132122993,\n          -0.03259630501270294,\n          -0.0031703568529337645,\n          0.00951597560197115,\n          0.007344632875174284,\n          0.013302745297551155,\n          0.0334596112370491,\n          0.000456178211607039,\n          0.022001197561621666,\n          -0.005081727169454098,\n          0.01700449362397194,\n          -0.01453230157494545,\n          -0.00827988050878048,\n          -0.023309236392378807,\n          -0.02882915548980236,\n          -0.027390314266085625,\n          -0.006520568858832121,\n          -0.007096105720847845,\n          -0.020235346630215645,\n          -0.007854768075048923,\n          -0.028018172830343246,\n          -0.007004543207585812,\n          0.021085571497678757,\n          0.010281178168952465,\n          -0.01880958490073681,\n          0.023178432136774063,\n          -0.017370743677020073,\n          -0.0017952820053324103,\n          0.020653918385505676,\n          0.007998651824891567,\n          -0.008848876692354679,\n          
-0.00837798323482275,\n          -0.01195546705275774,\n          -0.028750672936439514,\n          -0.011151023209095001,\n          -0.0063930354081094265,\n          -0.000860034953802824,\n          0.009208586998283863,\n          0.018508736044168472,\n          -0.010019570589065552,\n          -0.01792011968791485,\n          -0.010313878767192364,\n          -0.012897253967821598,\n          0.027651920914649963,\n          0.007874388247728348,\n          -0.00720728887245059,\n          0.01252446323633194,\n          0.0016464927466586232,\n          -0.008515327237546444,\n          -0.04206649959087372,\n          -0.01007189229130745,\n          0.020235346630215645,\n          0.01590574160218239,\n          0.0029839614871889353,\n          -0.010065351612865925,\n          0.02257673442363739,\n          -0.021870393306016922,\n          0.007410035002976656,\n          -0.0358140766620636,\n          -0.009156265296041965,\n          0.012445980682969093,\n          0.00894697941839695,\n          -0.02948317490518093,\n          0.018417173996567726,\n          0.016232751309871674,\n          0.007802446372807026,\n          -0.0048168497160077095,\n          -0.00948327500373125,\n          -0.013433549553155899,\n          -0.03623265027999878,\n          -0.03479380905628204,\n          -0.005542810540646315,\n          -0.006808337289839983,\n          0.012164752930402756,\n          0.02974478155374527,\n          -0.02838442288339138,\n          -0.009450574405491352,\n          0.00206997012719512,\n          0.005575511604547501,\n          -0.02053619548678398,\n          -0.01083055417984724,\n          0.19746141135692596,\n          -0.011026759631931782,\n          0.011589216068387032,\n          0.013551272451877594,\n          -0.018940389156341553,\n          -0.0031768970657140017,\n          0.0380377434194088,\n          -0.01406140811741352,\n          -0.0190188717097044,\n          -0.01117718406021595,\n          
-0.0038031202275305986,\n          0.008626510389149189,\n          -0.038560956716537476,\n          0.0009990140097215772,\n          0.011890064924955368,\n          -0.018443334847688675,\n          -0.026553170755505562,\n          -0.047717224806547165,\n          0.002261270536109805,\n          0.0106147276237607,\n          -0.015958063304424286,\n          -0.009581377729773521,\n          -0.021608786657452583,\n          -0.019411282613873482,\n          0.028802994638681412,\n          0.010156914591789246,\n          -0.013355066999793053,\n          0.00703070405870676,\n          0.02812281623482704,\n          -0.010601647198200226,\n          -0.01287109311670065,\n          -0.0014159510610625148,\n          0.005987543612718582,\n          0.007122266571968794,\n          -0.023374637588858604,\n          -0.0034205191768705845,\n          0.014022166840732098,\n          0.016455117613077164,\n          0.007619320880621672,\n          -0.03461068496108055,\n          0.003485921071842313,\n          0.028227457776665688,\n          0.011321067810058594,\n          -0.027207188308238983,\n          0.02438182756304741,\n          0.03280559182167053,\n          -0.003281540237367153,\n          -0.028462905436754227,\n          -0.030503444373607635,\n          0.023989414796233177,\n          -0.02328307554125786,\n          -0.003440139815211296,\n          -0.004735097289085388,\n          0.020444633439183235,\n          -0.011661157943308353,\n          -0.006880279630422592,\n          -0.004686045926064253,\n          0.01746230572462082,\n          0.02882915548980236,\n          -0.0007136982167139649,\n          -0.016677483916282654,\n          0.014362256042659283,\n          -0.000626223161816597,\n          0.026069195941090584,\n          -0.004117049276828766,\n          0.04360998421907425,\n          -0.023099949583411217,\n          0.0006805884768255055,\n          0.006756016053259373,\n          -0.008567648008465767,\n    
      0.0013407388469204307,\n          -0.03220389410853386,\n          -0.04423784092068672,\n          -0.008528407663106918,\n          -0.01017653476446867,\n          -0.032517824321985245,\n          0.030503444373607635,\n          0.039476584643125534,\n          0.0010088242124766111,\n          -0.0030444583389908075,\n          0.00014316884335130453,\n          0.015173239633440971,\n          -0.022223563864827156,\n          -0.0075800796039402485,\n          -0.013100000098347664,\n          -0.008430304005742073,\n          0.029561657458543777,\n          0.006046405527740717,\n          -0.004463679622858763,\n          0.0016873689601197839,\n          -0.018024763092398643,\n          -0.01969905197620392,\n          -0.0003376372915226966,\n          -0.009842985309660435,\n          0.005830578971654177,\n          -0.017946280539035797,\n          0.03693899139761925,\n          0.014689265750348568,\n          -0.00027979747392237186,\n          -0.0057913376949727535,\n          -0.017750075086951256,\n          0.0581292062997818,\n          0.011903145350515842,\n          0.026082275435328484,\n          0.004179181065410376,\n          9.876707918010652e-05,\n          0.004954193718731403,\n          0.013485871255397797,\n          0.006311282981187105,\n          -0.011092161759734154,\n          -0.008463005535304546,\n          -0.0491560660302639,\n          0.0035840237978845835,\n          0.007050324231386185,\n          0.0033780080266296864,\n          0.015801098197698593,\n          -0.028567548841238022,\n          -0.015369446016848087,\n          9.871598740573972e-05,\n          -0.009921467863023281,\n          -0.008090214803814888,\n          -0.00524523202329874,\n          -0.006514028646051884,\n          0.016389714553952217,\n          -0.020863205194473267,\n          -0.04151712357997894,\n          -0.022681377828121185,\n          0.01899271085858345,\n          0.003167086746543646,\n          
-0.010326959192752838,\n          0.008803095668554306,\n          -0.039502743631601334,\n          -0.015173239633440971,\n          0.035918720066547394,\n          -0.00403202697634697,\n          0.0014265788486227393,\n          -0.00040528737008571625,\n          0.007632401306182146,\n          0.003816200653091073,\n          -0.009901846759021282,\n          -0.0019113704329356551,\n          0.011255666613578796,\n          0.009856065735220909,\n          0.024198701605200768,\n          0.003482650965452194,\n          -0.016036545857787132,\n          0.009130104444921017,\n          0.014440738596022129,\n          -0.021137893199920654,\n          -0.006912980228662491,\n          -0.010320419445633888,\n          0.02746879681944847,\n          0.022511333227157593,\n          0.0022514602169394493,\n          0.029587818309664726,\n          0.025598302483558655,\n          -0.013825960457324982,\n          0.01027463749051094,\n          -0.006052945274859667,\n          0.015788016840815544,\n          -0.04073230177164078,\n          0.006520568858832121,\n          0.015304043889045715,\n          -0.0014609148493036628,\n          -0.0201568640768528,\n          -0.02098092809319496,\n          -0.16366170346736908,\n          0.0023871692828834057,\n          0.02661857195198536,\n          -0.03149755299091339,\n          0.02480039931833744,\n          0.029012281447649002,\n          0.019162755459547043,\n          -0.012923414818942547,\n          0.0023054168559610844,\n          -0.01945052482187748,\n          0.01835177280008793,\n          -0.008783474564552307,\n          -0.0019031951669603586,\n          -0.02591223083436489,\n          -0.013152320869266987,\n          0.0030869694892317057,\n          -0.02389785274863243,\n          0.045938290655612946,\n          0.024682676419615746,\n          -0.006769096478819847,\n          0.015434847213327885,\n          -0.024015575647354126,\n          0.018600299954414368,\n       
   -0.00020897950162179768,\n          0.021765751764178276,\n          6.254056643228978e-05,\n          -0.0033681977074593306,\n          0.002016013488173485,\n          -0.005343334749341011,\n          -0.04264203459024429,\n          0.005055566783994436,\n          -0.03937194123864174,\n          0.014035247266292572,\n          0.004708936437964439,\n          0.03471532464027405,\n          -0.015931902453303337,\n          0.016245830804109573,\n          -0.013243883848190308,\n          -0.04766490310430527,\n          0.01308691967278719,\n          0.019162755459547043,\n          0.021870393306016922,\n          0.014898551627993584,\n          0.0019130054861307144,\n          0.00324229896068573,\n          -0.009712181985378265,\n          0.009319770149886608,\n          -0.015382526442408562,\n          0.010320419445633888,\n          -0.021268697455525398,\n          0.013682076707482338,\n          -0.019372042268514633,\n          0.02081088349223137,\n          -0.018600299954414368,\n          0.033198002725839615,\n          0.014898551627993584,\n          -0.012602945789694786,\n          -0.008757313713431358,\n          -0.00326518970541656,\n          -0.02192271500825882,\n          -0.004686045926064253,\n          -0.004990164656192064,\n          0.00452254107221961,\n          -0.0037900398019701242,\n          -0.007586619816720486,\n          -0.0037704193964600563,\n          -0.015238641761243343,\n          0.026448527351021767,\n          -0.02077164314687252,\n          0.02075856178998947,\n          -0.005689965095371008,\n          -0.007168047595769167,\n          -0.005853469483554363,\n          -0.025415176525712013,\n          -0.013028057292103767,\n          0.018678780645132065,\n          -0.014349175617098808,\n          -0.01240019965916872,\n          0.0074361953884363174,\n          0.008397603407502174,\n          -0.01208627037703991,\n          0.021726509556174278,\n          
-0.024316424503922462,\n          -0.008986220695078373,\n          -0.007933249697089195,\n          0.003279905067756772,\n          0.015304043889045715,\n          -0.002293971600010991,\n          -0.006926060654222965,\n          0.0016759235877543688,\n          0.020510034635663033,\n          -0.028096655383706093,\n          -0.04633070155978203,\n          0.0019440713804215193,\n          -0.0009981964249163866,\n          0.001741325482726097,\n          0.016180429607629776,\n          -0.028279779478907585,\n          -0.0011216425336897373,\n          -0.02977094240486622,\n          0.009175886400043964,\n          -0.0018198078032582998,\n          0.014283774420619011,\n          -0.016808288171887398,\n          -0.004123589489609003,\n          0.006474787835031748,\n          -0.00508499750867486,\n          -0.012249775230884552,\n          0.027390314266085625,\n          -0.01628507301211357,\n          0.026226161047816277,\n          -0.009522516280412674,\n          0.017540788277983665,\n          0.03890104964375496,\n          -0.02077164314687252,\n          0.011543435044586658,\n          -0.023361558094620705,\n          -0.013171941973268986,\n          0.00045290813432075083,\n          -0.0067167747765779495,\n          0.01655976101756096,\n          0.006141237914562225,\n          -0.01967289112508297,\n          0.03419211134314537,\n          -0.02232820726931095,\n          -0.025990713387727737,\n          -0.09674248844385147,\n          -0.03680818900465965,\n          -0.00026957844966091216,\n          0.022511333227157593,\n          -0.011994707398116589,\n          0.03288407251238823,\n          -0.009757963009178638,\n          0.005686694756150246,\n          -0.03037264011800289,\n          0.025807587429881096,\n          -0.01768467202782631,\n          -0.01812940649688244,\n          -0.01365591585636139,\n          -0.018914228305220604,\n          0.01038582157343626,\n          -0.013760559260845184,\n  
        -0.009476734325289726,\n          -0.03649425879120827,\n          -0.01746230572462082,\n          0.01879650540649891,\n          0.011046379804611206,\n          0.004872441291809082,\n          0.017082976177334785,\n          -0.024251023307442665,\n          -0.002027458744123578,\n          -0.009869146160781384,\n          -0.035892561078071594,\n          0.021752670407295227,\n          0.01964673027396202,\n          -0.002217124216258526,\n          -0.010686669498682022,\n          -0.011713479645550251,\n          0.007272691000252962,\n          -0.029849424958229065,\n          0.01161537691950798,\n          -0.0009916562121361494,\n          0.003891412867233157,\n          -0.020012980327010155,\n          0.023178432136774063,\n          -0.029195405542850494,\n          -0.015133998356759548,\n          -0.0020078381057828665,\n          0.008659210987389088,\n          -0.003518621902912855,\n          -0.0023005118127912283,\n          -0.01497703418135643,\n          -0.0013758924324065447,\n          0.012203994207084179,\n          -0.011903145350515842,\n          -0.03272710740566254,\n          -0.008548027835786343,\n          -0.019411282613873482,\n          -0.02366240695118904,\n          0.012203994207084179,\n          0.00033273216104134917,\n          -0.0002947172906715423,\n          0.036782026290893555,\n          -0.015709536150097847,\n          -0.0035938341170549393,\n          0.02656625024974346,\n          0.005428357515484095,\n          0.013616674579679966,\n          -0.019372042268514633,\n          0.006272041704505682,\n          0.00254249875433743,\n          0.0007427202654071152,\n          -0.020863205194473267,\n          -0.0020307288505136967,\n          0.0022122191730886698,\n          -0.03843015432357788,\n          0.007279231213033199,\n          0.020444633439183235,\n          -0.006049675401300192,\n          0.004375386983156204,\n          -0.009973789565265179,\n          
-0.010562405921518803,\n          -0.001448652008548379,\n          -0.015356365591287613,\n          0.01672980561852455,\n          0.00592214148491621,\n          -0.032099250704050064,\n          -0.02524513192474842,\n          -0.007913629524409771,\n          0.002585009904578328,\n          0.006294932682067156,\n          0.0018868447514250875,\n          -0.01812940649688244,\n          -0.015395605936646461,\n          0.009672940708696842,\n          -0.02328307554125786,\n          -0.011634997092187405,\n          0.03714827820658684,\n          0.012570244260132313,\n          0.007233449723571539,\n          0.0009973789565265179,\n          -0.001406140741892159,\n          -0.014388416893780231,\n          -0.009705641306936741,\n          0.027128705754876137,\n          0.02595147304236889,\n          -0.004453869070857763,\n          -0.0022514602169394493,\n          -0.0513797290623188,\n          0.021321017295122147,\n          -0.008345281705260277,\n          -0.020457712933421135,\n          -0.003809660440310836,\n          -0.0010047366376966238,\n          0.020418472588062286,\n          -0.02415946125984192,\n          -0.0033518471755087376,\n          0.006579430773854256,\n          -0.04141248017549515,\n          0.011255666613578796,\n          -0.010425061918795109,\n          -0.006841038353741169,\n          -0.012949575670063496,\n          0.001469907583668828,\n          0.028253618627786636,\n          0.0015091487439349294,\n          -0.01027463749051094,\n          -0.002521243179216981,\n          -0.0012614390579983592,\n          0.0008445020066574216,\n          0.030058711767196655,\n          -0.0006335808429867029,\n          -0.01385212130844593,\n          0.00882271584123373,\n          -0.0256898645311594,\n          0.02640928514301777,\n          -0.008227558806538582,\n          0.0017004492692649364,\n          0.01164153777062893,\n          -0.00017873111937660724,\n          -0.015591812320053577,\n 
         0.007233449723571539,\n          -0.026710133999586105,\n          -0.012125511653721333,\n          0.006242610979825258,\n          0.028802994638681412,\n          -0.003665776224806905,\n          0.0559317022562027,\n          -0.020483873784542084,\n          -0.02412021905183792,\n          0.020261507481336594,\n          -0.013812880031764507,\n          -0.026474688202142715,\n          -0.0051405890844762325,\n          -0.010438142344355583,\n          -0.00013611769827548414,\n          0.01206010952591896,\n          0.006435546558350325,\n          0.014519221149384975,\n          0.03037264011800289,\n          -0.03427059203386307,\n          -0.024316424503922462,\n          -0.0001540010271128267,\n          0.009202047251164913,\n          0.017815476283431053,\n          -0.008744233287870884,\n          0.002892398973926902,\n          0.0256898645311594,\n          0.0469585619866848,\n          -0.010248477570712566,\n          0.005212530959397554,\n          -0.010065351612865925,\n          -0.0018590488471090794,\n          -0.013394308276474476,\n          -0.029797103255987167,\n          0.013263504952192307,\n          -0.008927359245717525,\n          -0.013642835430800915,\n          -0.005971193313598633,\n          -0.005438167601823807,\n          0.015997303649783134,\n          0.002414965070784092,\n          0.03599720448255539,\n          0.01852181740105152,\n          -0.004771068226546049,\n          0.022262806072831154,\n          -0.004682775586843491,\n          0.009561757557094097,\n          0.022916823625564575,\n          -0.007926709949970245,\n          0.00502286572009325,\n          -0.0013955129543319345,\n          0.029143085703253746,\n          0.01484623085707426,\n          -0.016154268756508827,\n          0.004708936437964439,\n          -0.009574837982654572,\n          0.003672316437587142,\n          -0.0075800796039402485,\n          0.021373338997364044,\n          
-0.013106539845466614,\n          0.009156265296041965,\n          0.014479979872703552,\n          -0.015160159207880497,\n          -0.038116224110126495,\n          0.012321717105805874,\n          0.008770394138991833,\n          0.014022166840732098,\n          0.02932620979845524,\n          -0.011805042624473572,\n          0.01126874703913927,\n          -0.026723215356469154,\n          -0.011438791640102863,\n          -0.003992785699665546,\n          -0.024878881871700287,\n          -0.003567673498764634,\n          -0.0032553793862462044,\n          0.002880953485146165,\n          0.02859370969235897,\n          0.021268697455525398,\n          0.02275986038148403,\n          0.013982925564050674,\n          -0.01858721859753132,\n          0.026657812297344208,\n          -0.01485931035131216,\n          -0.008515327237546444,\n          -0.03887488692998886,\n          0.007665102370083332,\n          0.0019506115932017565,\n          0.005748826544731855,\n          0.028750672936439514,\n          -0.02125561609864235,\n          0.018456414341926575,\n          0.005987543612718582,\n          0.030084872618317604,\n          -0.011654618196189404,\n          0.00436230655759573,\n          0.010601647198200226,\n          -0.014048327691853046,\n          -0.0011944021098315716,\n          -0.026265401393175125,\n          -0.0008216113201342523,\n          -0.0036069145426154137,\n          -0.02414637990295887,\n          0.0029463553801178932,\n          -0.0019816774874925613,\n          -0.01186390407383442,\n          0.05901867151260376,\n          0.02950933575630188,\n          0.008633050136268139,\n          0.027599601075053215,\n          0.018443334847688675,\n          -0.005412006750702858,\n          0.0055330004543066025,\n          0.00016442444757558405,\n          -0.015984224155545235,\n          0.006406115833669901,\n          0.024224862456321716,\n          -0.019555166363716125,\n          0.02077164314687252,\n       
   -0.003317511174827814,\n          -0.0008420494268648326,\n          0.016939090564846992,\n          -0.023649325594305992,\n          0.00926744844764471,\n          0.026029955595731735,\n          -0.02639620564877987,\n          0.009444033727049828,\n          0.0167559664696455,\n          0.020967848598957062,\n          0.00742965517565608,\n          -0.040810782462358475,\n          -0.014401497319340706,\n          0.030529605224728584,\n          -0.014218372292816639,\n          -0.022890662774443626,\n          -0.04533659294247627,\n          0.01410064846277237,\n          0.0030133924447000027,\n          -0.03670354560017586,\n          -0.035212382674217224,\n          0.0008608524804003537,\n          0.010130753740668297,\n          0.007488517090678215,\n          -0.02770424261689186,\n          0.011981626972556114,\n          0.005774987395852804,\n          0.01628507301211357,\n          0.03427059203386307,\n          -0.02752111852169037,\n          -0.033407289534807205,\n          -0.016612080857157707,\n          0.01129490789026022,\n          -0.023099949583411217,\n          -0.03529086336493492,\n          -0.02149106375873089\n        ]\n      },\n      \"text_id_to_doc_id\": {\n        \"3e6bad2c-8631-4352-8c7b-77c2f207b8f0\": \"04983d90-d213-481b-a496-e83882124d36\"\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "notebooks/langchain.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"source: https://langchain.readthedocs.io/en/latest/modules/document_loaders/examples/pdf.html#\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Note: you may need to restart the kernel to use updated packages.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"%pip install faiss-cpu pymupdf chromadb -q\\n\",\n    \"\\n\",\n    \"# %pip install \\\"unstructured[local-inference]\\\" -q\\n\",\n    \"# %pip install \\\"detectron2@git+https://github.com/facebookresearch/detectron2.git@v0.6#egg=detectron2\\\" -q\\n\",\n    \"# %brew install poppler\\n\",\n    \"# at the end, still had errors\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## PyPDF\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 9,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"pages 33\\n\",\n      \"page 1 \\n\",\n      \" Bayesian Data Analysis\\n\",\n      \"Module 3: Models with more than one parameter\\n\",\n      \"Stat 474/574\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from langchain.document_loaders import PyPDFLoader\\n\",\n    \"\\n\",\n    \"loader = PyPDFLoader(\\\"pdfs/09_The Normal (mu, sigma) model.pdf\\\")\\n\",\n    \"pages = loader.load_and_split()\\n\",\n    \"\\n\",\n    \"print(\\\"pages\\\" , len(pages))\\n\",\n    \"print(\\\"page 1 \\\\n\\\", pages[0].page_content)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 13,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      
\"\\n\",\n      \" page 25 \\n\",\n      \" Conjugate prior for the normal model (cont'd)\\n\",\n      \"Interpretation of posterior parameters:\\n\",\n      \"As before,\\u0016nis a weighted average of the prior mean and the sample\\n\",\n      \"mean.\\n\",\n      \"The posterior \\\\guess\\\" \\u0017n\\u001b2\\n\",\n      \"nis the sum of the sample sum of squared\\n\",\n      \"deviations, the prior sum of squared deviations, and additional\\n\",\n      \"uncertainty due to the di\\u000berence between the sample mean and the\\n\",\n      \"prior mean.\\n\",\n      \"Stat 474/574 (ISU) Spring, 2023 26 / 33\\n\",\n      \"\\n\",\n      \" page 21 \\n\",\n      \" Conjugate prior for the normal model\\n\",\n      \"Recall that using a non-informative prior, we found that\\n\",\n      \"p(\\u0016j\\u001b2;y)/N(\\u0016y;\\u001b2=n)\\n\",\n      \"p(\\u001b2jy)/Inv\\u0000\\u001f2(n\\u00001;s2):\\n\",\n      \"Then, factoring p(\\u0016;\\u001b2) =p(\\u0016j\\u001b2)p(\\u001b2) the conjugate prior for \\u001b2\\n\",\n      \"would also be scaled inverse \\u001f2and for\\u0016(conditional on \\u001b2) would\\n\",\n      \"be normal.\\n\",\n      \"Stat 474/574 (ISU) Spring, 2023 22 / 33\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from langchain.vectorstores import FAISS\\n\",\n    \"from langchain.embeddings.openai import OpenAIEmbeddings\\n\",\n    \"\\n\",\n    \"faiss_index = FAISS.from_documents(pages, OpenAIEmbeddings())\\n\",\n    \"docs = faiss_index.similarity_search(\\\"What is conjugate prior for mean?\\\", k=2)\\n\",\n    \"for doc in docs:\\n\",\n    \"    print(f\\\"\\\\n page {str(doc.metadata['page'])} \\\\n {doc.page_content}\\\")\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## pymupdf\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 29,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": 
\"stream\",\n     \"text\": [\n      \"Conjugate prior for the normal model (cont’d)\\n\",\n      \"Interpretation of posterior parameters:\\n\",\n      \"As before, µn is a weighted average of the prior mean and the sample\\n\",\n      \"mean.\\n\",\n      \"The posterior “guess” νnσ2\\n\",\n      \"n is the sum of the sample sum of squared\\n\",\n      \"deviations, the prior sum of squared deviations, and additional\\n\",\n      \"uncertainty due to the diﬀerence between the sample mean and the\\n\",\n      \"prior mean.\\n\",\n      \"Stat 474/574 (ISU)\\n\",\n      \"Spring, 2023\\n\",\n      \"26 / 33\\n\",\n      \"\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from langchain.document_loaders import PyMuPDFLoader\\n\",\n    \"loader = PyMuPDFLoader(\\\"pdfs/09_The Normal (mu, sigma) model.pdf\\\")\\n\",\n    \"\\n\",\n    \"data = loader.load()\\n\",\n    \"data[0]\\n\",\n    \"\\n\",\n    \"print(data[25].page_content)\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## VectorDB QA\\n\",\n    \"\\n\",\n    \"https://langchain.readthedocs.io/en/latest/modules/chat/examples/vector_db_qa_with_sources.html\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 38,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from langchain.embeddings.openai import OpenAIEmbeddings\\n\",\n    \"from langchain.text_splitter import CharacterTextSplitter\\n\",\n    \"from langchain.vectorstores import Chroma\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 39,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from langchain.document_loaders import PyMuPDFLoader\\n\",\n    \"loader = PyMuPDFLoader(\\\"pdfs/09_The Normal (mu, sigma) model.pdf\\\")\\n\",\n    \"\\n\",\n    \"documents = loader.load()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 40,\n   \"metadata\": {},\n   \"outputs\": [\n    
{\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Running Chroma using direct local API.\\n\",\n      \"Using DuckDB in-memory for database. Data will be transient.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)\\n\",\n    \"documents = text_splitter.split_documents(documents)\\n\",\n    \"embeddings = OpenAIEmbeddings()\\n\",\n    \"docsearch = Chroma.from_documents(documents, embeddings)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 44,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from langchain.chat_models import ChatOpenAI\\n\",\n    \"from langchain.prompts.chat import (\\n\",\n    \"    ChatPromptTemplate,\\n\",\n    \"    SystemMessagePromptTemplate,\\n\",\n    \"    AIMessagePromptTemplate,\\n\",\n    \"    HumanMessagePromptTemplate,\\n\",\n    \")\\n\",\n    \"from langchain.schema import (\\n\",\n    \"    AIMessage,\\n\",\n    \"    HumanMessage,\\n\",\n    \"    SystemMessage\\n\",\n    \")\\n\",\n    \"\\n\",\n    \"system_template=\\\"\\\"\\\"Use the following pieces of context to answer the users question. 
\\n\",\n    \"If you don't know the answer, just say that you don't know, don't try to make up an answer.\\n\",\n    \"----------------\\n\",\n    \"{context}\\\"\\\"\\\"\\n\",\n    \"messages = [\\n\",\n    \"    SystemMessagePromptTemplate.from_template(system_template),\\n\",\n    \"    HumanMessagePromptTemplate.from_template(\\\"{question}\\\")\\n\",\n    \"]\\n\",\n    \"prompt = ChatPromptTemplate.from_messages(messages)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 46,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from langchain.chains import VectorDBQA\\n\",\n    \"\\n\",\n    \"chain_type_kwargs = {\\\"prompt\\\": prompt}\\n\",\n    \"qa = VectorDBQA.from_chain_type(llm=ChatOpenAI(), chain_type=\\\"stuff\\\", vectorstore=docsearch, chain_type_kwargs=chain_type_kwargs)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 56,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"res = qa.run(\\\"what is joint posterior distribution, conditional posterior, and marginal posterior of normal? Provide the notations and an english translation\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 69,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"The joint posterior distribution of normal in this context is denoted as p(µ, σ2|y), which represents the probability\\n\",\n      \"distribution of the two parameters (mean and variance) after observing the data y. This distribution can be obtained by\\n\",\n      \"multiplying the prior distribution of the two parameters with the likelihood function of the data, and is used to make\\n\",\n      \"inference about both parameters simultaneously.  The conditional posterior of normal, specifically of the mean parameter\\n\",\n      \"µ given the variance parameter σ2 and the data y, is denoted as µ|σ2, y. 
This represents the probability distribution of\\n\",\n      \"the mean parameter after observing the data y, conditional on the value of the variance parameter σ2. This distribution\\n\",\n      \"can be obtained from the joint posterior by using Bayes' theorem and isolating the mean parameter as the variable of\\n\",\n      \"interest.  The marginal posterior of normal is denoted as p(θ1|y), and represents the probability distribution of the\\n\",\n      \"variable of interest (in this case, the mean parameter) after observing the data y, marginalizing over the other\\n\",\n      \"parameter(s) (in this case, the variance parameter). This distribution can be obtained by integrating the joint\\n\",\n      \"posterior distribution over the nuisance parameter, resulting in a probability distribution that only involves the\\n\",\n      \"variable of interest.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from pprint import pprint\\n\",\n    \"\\n\",\n    \"# wrap text in python\\n\",\n    \"import textwrap\\n\",\n    \"\\n\",\n    \"def wrap_text(text, width=120):\\n\",\n    \"    return \\\"\\\\n\\\".join(textwrap.wrap(text, width=width))\\n\",\n    \"\\n\",\n    \"print(wrap_text(res))\"\n   ]\n  },\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"## multiple documents\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"import os\\n\",\n    \"\\n\",\n    \"pdfs = os.listdir(\\\"pdfs\\\")\\n\",\n    \"loaders = []\\n\",\n    \"for pdf in pdfs:\\n\",\n    \"    loaders.append(PyMuPDFLoader(f\\\"pdfs/{pdf}\\\"))\\n\",\n    \"\\n\",\n    \"docs = []\\n\",\n    \"for loader in loaders:\\n\",\n    \"    docs.extend(loader.load())\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"print(docs)\"\n   ]\n  }\n ],\n 
\"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"base\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.9.10\"\n  },\n  \"orig_nbformat\": 4\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "notebooks/langchain_llama.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"attachments\": {},\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# LlamaIndex with Langchain\\n\",\n    \"\\n\",\n    \"source: https://github.com/jerryjliu/gpt_index/blob/main/examples/langchain_demo/LangchainDemo.ipynb\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"Note: you may need to restart the kernel to use updated packages.\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"%pip install -Uq langchain llama-index\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from IPython.display import Markdown, display\\n\",\n    \"from langchain.agents import Tool\\n\",\n    \"from langchain.memory import ConversationBufferMemory\\n\",\n    \"from langchain.chat_models import ChatOpenAI\\n\",\n    \"\\n\",\n    \"from langchain.agents import initialize_agent\\n\",\n    \"from llama_index import GPTSimpleVectorIndex\\n\",\n    \"\\n\",\n    \"import logging\\n\",\n    \"import sys\\n\",\n    \"\\n\",\n    \"logging.basicConfig(stream=sys.stdout, level=logging.INFO)\\n\",\n    \"logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 2,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"index = GPTSimpleVectorIndex.load_from_disk('index.json')\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"tools = [\\n\",\n    \"    Tool(\\n\",\n    \"        name = \\\"lecture 1 Index\\\",\\n\",\n    \"        func=lambda q: str(index.query(q)),\\n\",\n    \"        description=\\\"Useful to answering questions about lecture 1: intro to 
AI.\\\",\\n\",\n    \"        return_direct=True\\n\",\n    \"    ),\\n\",\n    \"]\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"memory = ConversationBufferMemory(return_messages=True)\\n\",\n    \"llm = ChatOpenAI(temperature=0)\\n\",\n    \"agent_chain = initialize_agent(tools, llm, agent=\\\"conversational-react-description\\\", memory=memory)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 5,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total LLM token usage: 1878 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"> [query] Total LLM token usage: 1878 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total embedding token usage: 1 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"> [query] Total embedding token usage: 1 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"res = agent_chain.run(input=\\\"What does this lecture talk about? Summarize it for me\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 6,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/markdown\": [\n       \"<b>\\n\",\n       \"AI is the art of creating machines that perform functions that require intelligence when performed by humans. AI is the study of the computations that make it possible to perceive, reason, and act. AI is the enterprise of design and analysis of intelligent agents. 
AI has a long history, beginning with Alan Turing's 1950 paper \\\"Computing Machinery and Intelligence\\\" which proposed the Imitation Game as an operational test for intelligent behavior. AI technologies have been applied to a variety of tasks, including information retrieval, data mining, customizable software systems, smart devices, autonomous vehicles, and more. AI has been used to solve mathematical conjectures, plan logistics, and control spacecraft. AI has also been used to create driverless cars, and has seen a resurgence in popularity with the emergence of deep learning and its successful applications in image recognition, natural language processing, speech recognition, and machine translation.</b>\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"display(Markdown(f\\\"<b>{res}</b>\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 7,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'agent': ChatAgent(llm_chain=LLMChain(memory=None, callback_manager=<langchain.callbacks.shared.SharedCallbackManager object at 0x103bd8220>, verbose=False, prompt=ChatPromptTemplate(input_variables=['input', 'agent_scratchpad'], output_parser=None, partial_variables={}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], output_parser=None, partial_variables={}, template='Answer the following questions as best you can. 
You have access to the following tools:\\\\n\\\\nlecture 1 Index: Useful to answering questions about lecture 1: intro to AI.\\\\n\\\\nThe way you use the tools is by specifying a json blob.\\\\nSpecifically, this json should have a `action` key (with the name of the tool to use) and a `action_input` key (with the input to the tool going here).\\\\n\\\\nThe only values that should be in the \\\"action\\\" field are: lecture 1 Index\\\\n\\\\nThe $JSON_BLOB should only contain a SINGLE action, do NOT return a list of multiple actions. Here is an example of a valid $JSON_BLOB:\\\\n\\\\n```\\\\n{{\\\\n  \\\"action\\\": $TOOL_NAME,\\\\n  \\\"action_input\\\": $INPUT\\\\n}}\\\\n```\\\\n\\\\nALWAYS use the following format:\\\\n\\\\nQuestion: the input question you must answer\\\\nThought: you should always think about what to do\\\\nAction:\\\\n```\\\\n$JSON_BLOB\\\\n```\\\\nObservation: the result of the action\\\\n... (this Thought/Action/Observation can repeat N times)\\\\nThought: I now know the final answer\\\\nFinal Answer: the final answer to the original input question\\\\n\\\\nBegin! 
Reminder to always use the exact characters `Final Answer` when responding.', template_format='f-string', validate_template=True), additional_kwargs={}), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['agent_scratchpad', 'input'], output_parser=None, partial_variables={}, template='{input}\\\\n\\\\n{agent_scratchpad}', template_format='f-string', validate_template=True), additional_kwargs={})]), llm=ChatOpenAI(verbose=False, callback_manager=<langchain.callbacks.shared.SharedCallbackManager object at 0x103bd8220>, client=<class 'openai.api_resources.chat_completion.ChatCompletion'>, model_name='gpt-3.5-turbo', model_kwargs={'temperature': 0}, openai_api_key=None, request_timeout=60, max_retries=6, streaming=False, n=1, max_tokens=None), output_key='text'), allowed_tools=['lecture 1 Index'], return_values=['output']),\\n\",\n      \" 'callback_manager': <langchain.callbacks.shared.SharedCallbackManager object at 0x103bd8220>,\\n\",\n      \" 'early_stopping_method': 'force',\\n\",\n      \" 'max_iterations': 15,\\n\",\n      \" 'memory': ConversationBufferMemory(chat_memory=ChatMessageHistory(messages=[HumanMessage(content='What does this lecture talk about? Summarize it for me', additional_kwargs={}), AIMessage(content='\\\\nAI is the art of creating machines that perform functions that require intelligence when performed by humans. AI is the study of the computations that make it possible to perceive, reason, and act. AI is the enterprise of design and analysis of intelligent agents. AI has a long history, beginning with Alan Turing\\\\'s 1950 paper \\\"Computing Machinery and Intelligence\\\" which proposed the Imitation Game as an operational test for intelligent behavior. AI technologies have been applied to a variety of tasks, including information retrieval, data mining, customizable software systems, smart devices, autonomous vehicles, and more. AI has been used to solve mathematical conjectures, plan logistics, and control spacecraft. 
AI has also been used to create driverless cars, and has seen a resurgence in popularity with the emergence of deep learning and its successful applications in image recognition, natural language processing, speech recognition, and machine translation.', additional_kwargs={})]), output_key=None, input_key=None, return_messages=True, human_prefix='Human', ai_prefix='AI', memory_key='history'),\\n\",\n      \" 'return_intermediate_steps': False,\\n\",\n      \" 'tools': [Tool(name='lecture 1 Index', description='Useful to answering questions about lecture 1: intro to AI.', return_direct=True, verbose=False, callback_manager=<langchain.callbacks.shared.SharedCallbackManager object at 0x103bd8220>, func=<function <lambda> at 0x12873e9d0>, coroutine=None)],\\n\",\n      \" 'verbose': False}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"from pprint import pprint\\n\",\n    \"pprint(vars(agent_chain))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 8,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"{'history': [HumanMessage(content='What does this lecture talk about? Summarize it for me', additional_kwargs={}),\\n\",\n      \"             AIMessage(content='\\\\nAI is the art of creating machines that perform functions that require intelligence when performed by humans. AI is the study of the computations that make it possible to perceive, reason, and act. AI is the enterprise of design and analysis of intelligent agents. AI has a long history, beginning with Alan Turing\\\\'s 1950 paper \\\"Computing Machinery and Intelligence\\\" which proposed the Imitation Game as an operational test for intelligent behavior. AI technologies have been applied to a variety of tasks, including information retrieval, data mining, customizable software systems, smart devices, autonomous vehicles, and more. 
AI has been used to solve mathematical conjectures, plan logistics, and control spacecraft. AI has also been used to create driverless cars, and has seen a resurgence in popularity with the emergence of deep learning and its successful applications in image recognition, natural language processing, speech recognition, and machine translation.', additional_kwargs={})]}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"pprint(agent_chain.memory.load_memory_variables({}))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 10,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"[HumanMessage(content='What does this lecture talk about? Summarize it for me', additional_kwargs={}), AIMessage(content='\\\\nAI is the art of creating machines that perform functions that require intelligence when performed by humans. AI is the study of the computations that make it possible to perceive, reason, and act. AI is the enterprise of design and analysis of intelligent agents. AI has a long history, beginning with Alan Turing\\\\'s 1950 paper \\\"Computing Machinery and Intelligence\\\" which proposed the Imitation Game as an operational test for intelligent behavior. AI technologies have been applied to a variety of tasks, including information retrieval, data mining, customizable software systems, smart devices, autonomous vehicles, and more. AI has been used to solve mathematical conjectures, plan logistics, and control spacecraft. 
AI has also been used to create driverless cars, and has seen a resurgence in popularity with the emergence of deep learning and its successful applications in image recognition, natural language processing, speech recognition, and machine translation.', additional_kwargs={})]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"print(agent_chain.memory.buffer)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 9,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"ename\": \"ValueError\",\n     \"evalue\": \"Could not parse LLM output: I'm sorry, but as an AI language model, I don't have access to your previous question history. Could you please provide me with your previous question so that I can assist you?\",\n     \"output_type\": \"error\",\n     \"traceback\": [\n      \"\\u001b[0;31m---------------------------------------------------------------------------\\u001b[0m\",\n      \"\\u001b[0;31mValueError\\u001b[0m                                Traceback (most recent call last)\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/chat/base.py:50\\u001b[0m, in \\u001b[0;36mChatAgent._extract_tool_and_input\\u001b[0;34m(self, text)\\u001b[0m\\n\\u001b[1;32m     49\\u001b[0m \\u001b[39mtry\\u001b[39;00m:\\n\\u001b[0;32m---> 50\\u001b[0m     _, action, _ \\u001b[39m=\\u001b[39m text\\u001b[39m.\\u001b[39msplit(\\u001b[39m\\\"\\u001b[39m\\u001b[39m```\\u001b[39m\\u001b[39m\\\"\\u001b[39m)\\n\\u001b[1;32m     51\\u001b[0m     response \\u001b[39m=\\u001b[39m json\\u001b[39m.\\u001b[39mloads(action\\u001b[39m.\\u001b[39mstrip())\\n\",\n      \"\\u001b[0;31mValueError\\u001b[0m: not enough values to unpack (expected 3, got 1)\",\n      \"\\nDuring handling of the above exception, another exception occurred:\\n\",\n      \"\\u001b[0;31mValueError\\u001b[0m                                Traceback (most recent call last)\",\n      \"\\u001b[1;32m/Users/benedictneo/fun/ClassGPT/notebooks/langchain.ipynb Cell 
11\\u001b[0m in \\u001b[0;36m<module>\\u001b[0;34m\\u001b[0m\\n\\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/benedictneo/fun/ClassGPT/notebooks/langchain.ipynb#X13sZmlsZQ%3D%3D?line=0'>1</a>\\u001b[0m res \\u001b[39m=\\u001b[39m agent_chain\\u001b[39m.\\u001b[39;49mrun(\\u001b[39minput\\u001b[39;49m\\u001b[39m=\\u001b[39;49m\\u001b[39m\\\"\\u001b[39;49m\\u001b[39mWhat was my previous question?\\u001b[39;49m\\u001b[39m\\\"\\u001b[39;49m)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/chains/base.py:216\\u001b[0m, in \\u001b[0;36mChain.run\\u001b[0;34m(self, *args, **kwargs)\\u001b[0m\\n\\u001b[1;32m    213\\u001b[0m     \\u001b[39mreturn\\u001b[39;00m \\u001b[39mself\\u001b[39m(args[\\u001b[39m0\\u001b[39m])[\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39moutput_keys[\\u001b[39m0\\u001b[39m]]\\n\\u001b[1;32m    215\\u001b[0m \\u001b[39mif\\u001b[39;00m kwargs \\u001b[39mand\\u001b[39;00m \\u001b[39mnot\\u001b[39;00m args:\\n\\u001b[0;32m--> 216\\u001b[0m     \\u001b[39mreturn\\u001b[39;00m \\u001b[39mself\\u001b[39;49m(kwargs)[\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39moutput_keys[\\u001b[39m0\\u001b[39m]]\\n\\u001b[1;32m    218\\u001b[0m \\u001b[39mraise\\u001b[39;00m \\u001b[39mValueError\\u001b[39;00m(\\n\\u001b[1;32m    219\\u001b[0m     \\u001b[39mf\\u001b[39m\\u001b[39m\\\"\\u001b[39m\\u001b[39m`run` supported with either positional arguments or keyword arguments\\u001b[39m\\u001b[39m\\\"\\u001b[39m\\n\\u001b[1;32m    220\\u001b[0m     \\u001b[39mf\\u001b[39m\\u001b[39m\\\"\\u001b[39m\\u001b[39m but not both. 
Got args: \\u001b[39m\\u001b[39m{\\u001b[39;00margs\\u001b[39m}\\u001b[39;00m\\u001b[39m and kwargs: \\u001b[39m\\u001b[39m{\\u001b[39;00mkwargs\\u001b[39m}\\u001b[39;00m\\u001b[39m.\\u001b[39m\\u001b[39m\\\"\\u001b[39m\\n\\u001b[1;32m    221\\u001b[0m )\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/chains/base.py:116\\u001b[0m, in \\u001b[0;36mChain.__call__\\u001b[0;34m(self, inputs, return_only_outputs)\\u001b[0m\\n\\u001b[1;32m    114\\u001b[0m \\u001b[39mexcept\\u001b[39;00m (\\u001b[39mKeyboardInterrupt\\u001b[39;00m, \\u001b[39mException\\u001b[39;00m) \\u001b[39mas\\u001b[39;00m e:\\n\\u001b[1;32m    115\\u001b[0m     \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mcallback_manager\\u001b[39m.\\u001b[39mon_chain_error(e, verbose\\u001b[39m=\\u001b[39m\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mverbose)\\n\\u001b[0;32m--> 116\\u001b[0m     \\u001b[39mraise\\u001b[39;00m e\\n\\u001b[1;32m    117\\u001b[0m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mcallback_manager\\u001b[39m.\\u001b[39mon_chain_end(outputs, verbose\\u001b[39m=\\u001b[39m\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mverbose)\\n\\u001b[1;32m    118\\u001b[0m \\u001b[39mreturn\\u001b[39;00m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mprep_outputs(inputs, outputs, return_only_outputs)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/chains/base.py:113\\u001b[0m, in \\u001b[0;36mChain.__call__\\u001b[0;34m(self, inputs, return_only_outputs)\\u001b[0m\\n\\u001b[1;32m    107\\u001b[0m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mcallback_manager\\u001b[39m.\\u001b[39mon_chain_start(\\n\\u001b[1;32m    108\\u001b[0m     {\\u001b[39m\\\"\\u001b[39m\\u001b[39mname\\u001b[39m\\u001b[39m\\\"\\u001b[39m: \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39m\\u001b[39m__class__\\u001b[39m\\u001b[39m.\\u001b[39m\\u001b[39m__name__\\u001b[39m},\\n\\u001b[1;32m    109\\u001b[0m     inputs,\\n\\u001b[1;32m    
110\\u001b[0m     verbose\\u001b[39m=\\u001b[39m\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mverbose,\\n\\u001b[1;32m    111\\u001b[0m )\\n\\u001b[1;32m    112\\u001b[0m \\u001b[39mtry\\u001b[39;00m:\\n\\u001b[0;32m--> 113\\u001b[0m     outputs \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39;49m\\u001b[39m.\\u001b[39;49m_call(inputs)\\n\\u001b[1;32m    114\\u001b[0m \\u001b[39mexcept\\u001b[39;00m (\\u001b[39mKeyboardInterrupt\\u001b[39;00m, \\u001b[39mException\\u001b[39;00m) \\u001b[39mas\\u001b[39;00m e:\\n\\u001b[1;32m    115\\u001b[0m     \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mcallback_manager\\u001b[39m.\\u001b[39mon_chain_error(e, verbose\\u001b[39m=\\u001b[39m\\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mverbose)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/agent.py:505\\u001b[0m, in \\u001b[0;36mAgentExecutor._call\\u001b[0;34m(self, inputs)\\u001b[0m\\n\\u001b[1;32m    503\\u001b[0m \\u001b[39m# We now enter the agent loop (until it returns something).\\u001b[39;00m\\n\\u001b[1;32m    504\\u001b[0m \\u001b[39mwhile\\u001b[39;00m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39m_should_continue(iterations):\\n\\u001b[0;32m--> 505\\u001b[0m     next_step_output \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39;49m\\u001b[39m.\\u001b[39;49m_take_next_step(\\n\\u001b[1;32m    506\\u001b[0m         name_to_tool_map, color_mapping, inputs, intermediate_steps\\n\\u001b[1;32m    507\\u001b[0m     )\\n\\u001b[1;32m    508\\u001b[0m     \\u001b[39mif\\u001b[39;00m \\u001b[39misinstance\\u001b[39m(next_step_output, AgentFinish):\\n\\u001b[1;32m    509\\u001b[0m         \\u001b[39mreturn\\u001b[39;00m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39m_return(next_step_output, intermediate_steps)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/agent.py:409\\u001b[0m, in \\u001b[0;36mAgentExecutor._take_next_step\\u001b[0;34m(self, name_to_tool_map, color_mapping, 
inputs, intermediate_steps)\\u001b[0m\\n\\u001b[1;32m    404\\u001b[0m \\u001b[39m\\u001b[39m\\u001b[39m\\\"\\\"\\\"Take a single step in the thought-action-observation loop.\\u001b[39;00m\\n\\u001b[1;32m    405\\u001b[0m \\n\\u001b[1;32m    406\\u001b[0m \\u001b[39mOverride this to take control of how the agent makes and acts on choices.\\u001b[39;00m\\n\\u001b[1;32m    407\\u001b[0m \\u001b[39m\\\"\\\"\\\"\\u001b[39;00m\\n\\u001b[1;32m    408\\u001b[0m \\u001b[39m# Call the LLM to see what to do.\\u001b[39;00m\\n\\u001b[0;32m--> 409\\u001b[0m output \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39;49m\\u001b[39m.\\u001b[39;49magent\\u001b[39m.\\u001b[39;49mplan(intermediate_steps, \\u001b[39m*\\u001b[39;49m\\u001b[39m*\\u001b[39;49minputs)\\n\\u001b[1;32m    410\\u001b[0m \\u001b[39m# If the tool chosen is the finishing tool, then we end and return.\\u001b[39;00m\\n\\u001b[1;32m    411\\u001b[0m \\u001b[39mif\\u001b[39;00m \\u001b[39misinstance\\u001b[39m(output, AgentFinish):\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/agent.py:105\\u001b[0m, in \\u001b[0;36mAgent.plan\\u001b[0;34m(self, intermediate_steps, **kwargs)\\u001b[0m\\n\\u001b[1;32m     94\\u001b[0m \\u001b[39m\\u001b[39m\\u001b[39m\\\"\\\"\\\"Given input, decided what to do.\\u001b[39;00m\\n\\u001b[1;32m     95\\u001b[0m \\n\\u001b[1;32m     96\\u001b[0m \\u001b[39mArgs:\\u001b[39;00m\\n\\u001b[0;32m   (...)\\u001b[0m\\n\\u001b[1;32m    102\\u001b[0m \\u001b[39m    Action specifying what tool to use.\\u001b[39;00m\\n\\u001b[1;32m    103\\u001b[0m \\u001b[39m\\\"\\\"\\\"\\u001b[39;00m\\n\\u001b[1;32m    104\\u001b[0m full_inputs \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mget_full_inputs(intermediate_steps, \\u001b[39m*\\u001b[39m\\u001b[39m*\\u001b[39mkwargs)\\n\\u001b[0;32m--> 105\\u001b[0m action \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39;49m\\u001b[39m.\\u001b[39;49m_get_next_action(full_inputs)\\n\\u001b[1;32m    
106\\u001b[0m \\u001b[39mif\\u001b[39;00m action\\u001b[39m.\\u001b[39mtool \\u001b[39m==\\u001b[39m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mfinish_tool_name:\\n\\u001b[1;32m    107\\u001b[0m     \\u001b[39mreturn\\u001b[39;00m AgentFinish({\\u001b[39m\\\"\\u001b[39m\\u001b[39moutput\\u001b[39m\\u001b[39m\\\"\\u001b[39m: action\\u001b[39m.\\u001b[39mtool_input}, action\\u001b[39m.\\u001b[39mlog)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/agent.py:67\\u001b[0m, in \\u001b[0;36mAgent._get_next_action\\u001b[0;34m(self, full_inputs)\\u001b[0m\\n\\u001b[1;32m     65\\u001b[0m \\u001b[39mdef\\u001b[39;00m \\u001b[39m_get_next_action\\u001b[39m(\\u001b[39mself\\u001b[39m, full_inputs: Dict[\\u001b[39mstr\\u001b[39m, \\u001b[39mstr\\u001b[39m]) \\u001b[39m-\\u001b[39m\\u001b[39m>\\u001b[39m AgentAction:\\n\\u001b[1;32m     66\\u001b[0m     full_output \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39mllm_chain\\u001b[39m.\\u001b[39mpredict(\\u001b[39m*\\u001b[39m\\u001b[39m*\\u001b[39mfull_inputs)\\n\\u001b[0;32m---> 67\\u001b[0m     parsed_output \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39;49m\\u001b[39m.\\u001b[39;49m_extract_tool_and_input(full_output)\\n\\u001b[1;32m     68\\u001b[0m     \\u001b[39mwhile\\u001b[39;00m parsed_output \\u001b[39mis\\u001b[39;00m \\u001b[39mNone\\u001b[39;00m:\\n\\u001b[1;32m     69\\u001b[0m         full_output \\u001b[39m=\\u001b[39m \\u001b[39mself\\u001b[39m\\u001b[39m.\\u001b[39m_fix_text(full_output)\\n\",\n      \"File \\u001b[0;32m~/miniforge3/lib/python3.9/site-packages/langchain/agents/chat/base.py:55\\u001b[0m, in \\u001b[0;36mChatAgent._extract_tool_and_input\\u001b[0;34m(self, text)\\u001b[0m\\n\\u001b[1;32m     52\\u001b[0m     \\u001b[39mreturn\\u001b[39;00m response[\\u001b[39m\\\"\\u001b[39m\\u001b[39maction\\u001b[39m\\u001b[39m\\\"\\u001b[39m], 
response[\\u001b[39m\\\"\\u001b[39m\\u001b[39maction_input\\u001b[39m\\u001b[39m\\\"\\u001b[39m]\\n\\u001b[1;32m     54\\u001b[0m \\u001b[39mexcept\\u001b[39;00m \\u001b[39mException\\u001b[39;00m:\\n\\u001b[0;32m---> 55\\u001b[0m     \\u001b[39mraise\\u001b[39;00m \\u001b[39mValueError\\u001b[39;00m(\\u001b[39mf\\u001b[39m\\u001b[39m\\\"\\u001b[39m\\u001b[39mCould not parse LLM output: \\u001b[39m\\u001b[39m{\\u001b[39;00mtext\\u001b[39m}\\u001b[39;00m\\u001b[39m\\\"\\u001b[39m)\\n\",\n      \"\\u001b[0;31mValueError\\u001b[0m: Could not parse LLM output: I'm sorry, but as an AI language model, I don't have access to your previous question history. Could you please provide me with your previous question so that I can assist you?\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"res = agent_chain.run(input=\\\"What was my previous question?\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 16,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total LLM token usage: 1786 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"> [query] Total LLM token usage: 1786 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stderr\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"INFO:llama_index.token_counter.token_counter:> [query] Total embedding token usage: 4 tokens\\n\"\n     ]\n    },\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"> [query] Total embedding token usage: 4 tokens\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"res = agent_chain.run(input=\\\"Give me 3 practice test questions based on these slides with answers\\\")\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 17,\n   \"metadata\": {},\n   \"outputs\": [\n    {\n     
\"data\": {\n      \"text/markdown\": [\n       \"<b>\\n\",\n       \"Artificial Intelligence (AI) is the art of creating machines that perform functions that require intelligence when performed by humans. AI is the study of the computations that make it possible to perceive, reason, and act. AI is the enterprise of designing and analyzing intelligent agents. AI is concerned with thought processes/reasoning or behavior, and can model humans or measure against an ideal concept of intelligence, rationality.</b>\"\n      ],\n      \"text/plain\": [\n       \"<IPython.core.display.Markdown object>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"display(Markdown(f\\\"<b>{res}</b>\\\"))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"base\",\n   \"language\": \"python\",\n   \"name\": \"python3\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.9.10\"\n  },\n  \"orig_nbformat\": 4\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "requirements.txt",
    "content": "boto3==1.24.96\nbotocore==1.27.96\nlangchain==0.0.121\nllama_index==0.4.35.post1\nopenai==0.27.0\npypdf==3.6.0\npython-dotenv==1.0.0\nstreamlit==1.20.0\n"
  }
]